flake8

commit db01442d9b (parent 37ecbd0bd2)

.github/workflows/etl.yml (vendored)
@@ -16,7 +16,7 @@ jobs:
         python -m pip install -r requirements.txt
     - name: Run Flake8
       run: |
-        ls etl/*py | grep -v 'join_building_data' | xargs flake8
+        ls etl/*py | grep -v 'join_building_data' | xargs flake8 --exclude etl/__init__.py
     - name: Run tests
      run: |
         python -m pytest
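The added --exclude etl/__init__.py asks flake8 to skip the package's __init__.py while the rest of the etl/*.py files are still fed in through xargs. For reference only (not part of this commit), the same exclusion can be expressed when driving flake8 from Python via its legacy API; a hedged sketch, assuming flake8 is installed:

    # sketch: option names mirror the CLI flags (--exclude, --max-line-length, ...)
    from flake8.api import legacy as flake8_api

    style_guide = flake8_api.get_style_guide(exclude=["etl/__init__.py"])
    report = style_guide.check_files(["etl"])
    print(report.total_errors)  # count of reported violations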
@@ -4,13 +4,13 @@
 """
 import csv
 import glob
-import json
 import os
 import sys


 csv.field_size_limit(sys.maxsize)


 def main(mastermap_path):
     mm_paths = sorted(glob.glob(os.path.join(mastermap_path, "*.gml.csv")))
     for mm_path in mm_paths:
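Dropping import json is consistent with flake8's F401 check ('module imported but unused'): once nothing in the script references json, the import itself is reported. A minimal, hypothetical illustration of the same warning:

    import json  # flake8 reports F401: 'json' imported but unused
    import sys


    def main():
        # only sys is used below, so the json import above is dead weight
        sys.stdout.write("no json needed\n")


    if __name__ == '__main__':
        main()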
@@ -19,7 +19,8 @@ def main(mastermap_path):


 def filter_mastermap(mm_path):
-    output_path = "{}.filtered.csv".format(str(mm_path).replace(".gml.csv", ""))
+    output_path = "{}.filtered.csv"
+    output_path.format(str(mm_path).replace(".gml.csv", ""))
     output_fieldnames = ('WKT', 'fid', 'descriptiveGroup')
     # Open the input csv with all polygons, buildings and others
     with open(mm_path, 'r') as fh:
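If the replacement is exactly the two lines shown, note that str.format returns a new string rather than modifying output_path in place, so the formatted path is discarded and output_path keeps the bare "{}.filtered.csv" template. A sketch of a two-line form that preserves the original one-liner's behaviour while staying under the length limit:

    # sketch: reassign the result, since str.format does not mutate the string
    mm_path = "mastermap-tile-001.gml.csv"  # stand-in for the function argument
    output_path = "{}.filtered.csv"
    output_path = output_path.format(str(mm_path).replace(".gml.csv", ""))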
@@ -32,7 +33,8 @@ def filter_mastermap(mm_path):
             try:
                 if 'Building' in line['descriptiveGroup']:
                     w.writerow(line)
-            except TypeError: # when descriptiveGroup is missing, ignore this Polygon
+            # when descriptiveGroup is missing, ignore this Polygon
+            except TypeError:
                 pass

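Moving the comment onto its own line keeps the except line short; a comment is allowed between the try suite and the except clause, so behaviour is unchanged. For context, the handler exists because a row with no descriptiveGroup value gives None, and 'Building' in None raises TypeError. A hedged sketch of an explicit check instead of the exception handler (an alternative, not what the repo does):

    def write_buildings(reader, writer):
        """Copy only rows whose descriptiveGroup mentions 'Building'.

        reader is a csv.DictReader and writer a csv.DictWriter; rows where the
        descriptiveGroup field is missing (None) are silently skipped.
        """
        for line in reader:
            group = line.get('descriptiveGroup')
            if group is not None and 'Building' in group:
                writer.writerow(line)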
@@ -25,9 +25,10 @@ gdf = osmnx.footprints_from_point(point=point, dist=dist)

 # preview image
 gdf_proj = osmnx.projection.project_gdf(gdf, to_crs={'init': 'epsg:3857'})
-gdf_proj = gdf_proj[gdf_proj.geometry.apply(lambda g: g.geom_type != 'MultiPolygon')]
+gdf_proj = gdf_proj[gdf_proj.geometry.apply(lambda g: g.geom_type != 'MultiPolygon')]  # noqa

-fig, ax = osmnx.plot_footprints(gdf_proj, bgcolor='#333333', color='w', figsize=(4,4),
+fig, ax = osmnx.plot_footprints(gdf_proj, bgcolor='#333333',
+                                color='w', figsize=(4, 4),
                                 save=True, show=False, close=True,
                                 filename='test_buildings_preview', dpi=600)

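The # noqa added to the MultiPolygon filter tells flake8 to ignore that line (presumably to avoid splitting it for the line-length check), while the long plot_footprints call is wrapped instead. A bare # noqa hides every warning on the line; it can also be scoped to one code. Hypothetical example:

    # "# noqa" alone silences all checks on the line; "# noqa: E501" only the length check
    numbers = list(range(100))
    squares_of_small_even_numbers_kept_deliberately_on_one_long_line = [n * n for n in numbers if n % 2 == 0 and n < 50]  # noqa: E501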
@@ -50,7 +51,13 @@ gdf_to_save.rename(
 # convert to CSV
 test_data_csv = str(os.path.join(test_dir, 'test_buildings.3857.csv'))
 subprocess.run(["rm", test_data_csv])
-subprocess.run(["ogr2ogr", "-f", "CSV", test_data_csv, test_data_geojson, "-lco", "GEOMETRY=AS_WKT"])
+subprocess.run(
+    ["ogr2ogr", "-f", "CSV", test_data_csv,
+     test_data_geojson, "-lco", "GEOMETRY=AS_WKT"]
+)

 # add SRID for ease of loading to PostgreSQL
-subprocess.run(["sed", "-i", "s/^\"POLYGON/\"SRID=3857;POLYGON/", test_data_csv])
+subprocess.run(
+    ["sed", "-i", "s/^\"POLYGON/\"SRID=3857;POLYGON/",
+     test_data_csv]
+)
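Both subprocess.run calls are wrapped purely to fit the 79-character limit; the commands themselves are unchanged. An alternative sketch (same idea, with stand-in paths, since the real values come from earlier in the script) that names the argument list first, which reads more easily as arguments accumulate:

    import subprocess

    # stand-ins for variables defined earlier in the script
    test_data_csv = "test_buildings.3857.csv"
    test_data_geojson = "test_buildings.geojson"

    ogr2ogr_cmd = [
        "ogr2ogr", "-f", "CSV", test_data_csv,
        test_data_geojson, "-lco", "GEOMETRY=AS_WKT",
    ]
    subprocess.run(ogr2ogr_cmd)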