Aggregate code of the new approach to develop the process workflow

Alireza Adli 2024-08-23 15:53:42 -04:00
parent 8a37075c11
commit 042913acdc


@@ -1,63 +1,30 @@
""" """
handle_varennes_ds_workflow module handle_varennes_ds_workflow module
The workflow of cleaning and updating the buildings around NRCan datalayer has two polygons for each buildings' footprint.
Varennes Public Library dataset. The below workflow has been designed to remove the extra polygons.
Project Developer: Alireza Adli alireza.adli@concordia.ca Project Developer: Alireza Adli alireza.adli@concordia.ca
""" """
# You need to clone mtl_gis_oo project and
# add it as a dependency of this new project
from scrub_layer_class import * from scrub_layer_class import *
# Change the paths by the location of your QGIS installation and datalayers
qgis_path = 'C:/Program Files/QGIS 3.34.1/apps/qgis' qgis_path = 'C:/Program Files/QGIS 3.34.1/apps/qgis'
nrcan_varennes = 'C:/Users/a_adli/PycharmProjects/varennes_gis_oo/' \ varennes_nrcan_extra_polygons = \
'data/initial_data/endeavor/nrcan_centroid_main/' \ 'C:/Users/a_adli/PycharmProjects/varennes_gis_oo/' \
'nrcan_centroid_main.shp' 'data/initial_data/endeavor/nrcan_centroid_main/nrcan_centroid_main.shp'
duplcated = 'C:/Users/a_adli/PycharmProjects/varennes_gis_oo/' \ # first we duplicate the layer to preserve the main data layer.
'data/initial_data/endeavor/nrcan_centroids_1/' \ duplicated = \
'auto_building_2.shp' 'C:/Users/a_adli/PycharmProjects/varennes_gis_oo/' \
'data/initial_data/endeavor/nrcan_centroids_1/auto_building_2.shp'
# First, the layer will be duplicated as some records are going to be removed # First, the layer will be duplicated as some records are going to be removed
varennes_nrcan = ScrubLayer(qgis_path, nrcan_varennes, 'NRCan Varennes') varennes_nrcan = ScrubLayer(
qgis_path, varennes_nrcan_extra_polygons, 'NRCan Varennes')
varennes_nrcan_duplicate = varennes_nrcan.duplicate_layer( varennes_nrcan_duplicate = varennes_nrcan.duplicate_layer(
duplcated, 'NRCan duplicated') duplicated, 'NRCan duplicated')
-
-# Deleting duplicated layer unnecessary fields (will be added later)
-varennes_nrcan_duplicate.delete_field('acqtech')
-varennes_nrcan_duplicate.delete_field('acqtech_en')
-varennes_nrcan_duplicate.delete_field('acqtech_fr')
-varennes_nrcan_duplicate.delete_field('provider')
-varennes_nrcan_duplicate.delete_field('provider_e')
-varennes_nrcan_duplicate.delete_field('provider_f')
-varennes_nrcan_duplicate.delete_field('datemin')
-varennes_nrcan_duplicate.delete_field('datemax')
-varennes_nrcan_duplicate.delete_field('haccmin')
-varennes_nrcan_duplicate.delete_field('haccmax')
-varennes_nrcan_duplicate.delete_field('vaccmin')
-varennes_nrcan_duplicate.delete_field('vaccmax')
-varennes_nrcan_duplicate.delete_field('heightmin')
-varennes_nrcan_duplicate.delete_field('heightmax')
-varennes_nrcan_duplicate.delete_field('elevmin')
-varennes_nrcan_duplicate.delete_field('elevmax')
-varennes_nrcan_duplicate.delete_field('bldgarea')
-varennes_nrcan_duplicate.delete_field('comment')
-
-tolerance = 5
-varennes_nrcan_features = varennes_nrcan.layer.getFeatures()
-varennes_nrcan_duplicate_features = \
-    varennes_nrcan_duplicate.layer.getFeatures()
-
-for feature in varennes_nrcan_features:
-    for next_feature in varennes_nrcan_duplicate_features:
-        if feature['feature_id'] == next_feature['feature_id']:
-            continue
-        if abs(feature['centroid_x'] - next_feature['centroid_x']) < tolerance \
-                or \
-                abs(feature['centroid_y'] - next_feature['centroid_y']) < tolerance:
-            record_id = next_feature.id()
-            varennes_nrcan_duplicate.delete_record_by_index(record_id)
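
The block removed above de-duplicated footprints by comparing the centroid coordinates of every pair of records and dropping the second record of any pair closer than a fixed tolerance. As written, it reuses a feature iterator that is exhausted after the first outer pass, and it matches on either axis alone, so distant buildings that happen to share one coordinate would also be flagged. A minimal sketch of that centroid-based clean-up, assuming the duplicated layer carries feature_id, centroid_x and centroid_y attributes and that ScrubLayer.delete_record_by_index() behaves as used above, could look like this:

# Sketch only: pairwise centroid comparison on the duplicated layer.
# Field names and the ScrubLayer API are taken from the removed code above;
# treat them as assumptions, not as the project's final interface.
tolerance = 5  # maximum centroid offset, in layer units

# Materialise the features once; getFeatures() returns a one-shot iterator.
features = list(varennes_nrcan_duplicate.layer.getFeatures())

to_delete = set()
for feature in features:
    for other in features:
        # Look at each unordered pair only once and skip self-comparisons.
        if other.id() <= feature.id():
            continue
        # Treat two footprints as duplicates only when BOTH centroid
        # coordinates fall within the tolerance.
        if abs(feature['centroid_x'] - other['centroid_x']) < tolerance and \
                abs(feature['centroid_y'] - other['centroid_y']) < tolerance:
            to_delete.add(other.id())

# Delete after the scan so records are not removed while iterating.
for record_id in to_delete:
    varennes_nrcan_duplicate.delete_record_by_index(record_id)

This keeps the lower-id record of each duplicate pair. The scan is quadratic in the number of features, which is workable for a single-municipality layer but would call for a spatial index on larger extracts.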