Display official planning data.

This commit is contained in:
Mateusz Konieczny 2022-10-05 19:52:14 +02:00
parent 2bdbbce8d2
commit 8cbb3cd84b
21 changed files with 491 additions and 8 deletions

View File

@ -313,6 +313,28 @@
<PolygonSymbolizer fill="#73ebaf" />
</Rule>
</Style>
<Style name="planning_applications_status">
<Rule>
<Filter>[status] = "Submitted"</Filter>
<PolygonSymbolizer fill="#00ffff"/>
</Rule>
<Rule>
<Filter>[status] = "Approved"</Filter>
<PolygonSymbolizer fill="#00ff00"/>
</Rule>
<Rule>
<Filter>[status] = "Appeal In Progress"</Filter>
<PolygonSymbolizer fill="#ffff00"/>
</Rule>
<Rule>
<Filter>[status] = "Refused"</Filter>
<PolygonSymbolizer fill="#ff0000"/>
</Rule>
<Rule>
<Filter>[status] = "Withdrawn"</Filter>
<PolygonSymbolizer fill="#999999"/>
</Rule>
</Style>
<Style name="planning_combined">
<Rule>
<Filter>[planning_in_conservation_area] = true</Filter>

View File

@ -116,6 +116,23 @@ const getBuildingUPRNsById = asyncController(async (req: express.Request, res: e
}
});
// GET building planning data
const getBuildingPlanningDataById = asyncController(async (req: express.Request, res: express.Response) => {
const buildingId = processParam(req.params, 'building_id', parsePositiveIntParam, true);
try {
const result = await buildingService.getBuildingPlanningDataById(buildingId);
if (typeof (result) === 'undefined') {
return res.send({ error: 'Database error' });
}
res.send({data: result, buildingId: buildingId});
} catch(error) {
console.error(error);
res.send({ error: 'Database error' });
}
});
const getBuildingUserAttributesById = asyncController(async (req: express.Request, res: express.Response) => {
if(!req.session.user_id) {
return res.send({ error: 'Must be logged in'});
@ -202,6 +219,7 @@ export default {
getBuildingById,
updateBuildingById,
getBuildingUPRNsById,
getBuildingPlanningDataById,
getUserVerifiedAttributes,
verifyBuildingAttributes,
getBuildingEditHistoryById,

View File

@ -26,6 +26,7 @@ router.route('/:building_id.json')
// GET building UPRNs
router.get('/:building_id/uprns.json', buildingController.getBuildingUPRNsById);
router.get('/:building_id/planning_data.json', buildingController.getBuildingPlanningDataById);
// POST verify building attribute
router.route('/:building_id/verify.json')

View File

@ -56,4 +56,5 @@ export * from './edit';
export * from './history';
export * from './query';
export * from './uprn';
export * from './planningData';
export * from './verify';

View File

@ -0,0 +1,16 @@
import db from '../../../db';
export async function getBuildingPlanningDataById(id: number) {
try {
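// link planning applications to this building through the UPRNs shared by building_properties and planning_data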
return await db.any(
'SELECT building_properties.uprn, building_properties.building_id, planning_data.description, planning_data.status, planning_data.uprn, planning_data.planning_application_id, planning_application_link, to_char(planning_data.decision_date, \'YYYY-MM-DD\') AS decision_date, to_char(planning_data.last_synced_date, \'YYYY-MM-DD\') AS last_synced_date, planning_data.data_source, planning_data.data_source_link \
FROM building_properties \
INNER JOIN planning_data ON \
building_properties.uprn = planning_data.uprn WHERE building_id = $1',
[id]
);
} catch(error) {
console.error(error);
return undefined;
}
}

View File

@ -21,12 +21,14 @@ export function useBuildingData(buildingId: number, preloadedData: Building, inc
return;
}
try {
let [building, buildingUprns] = await Promise.all([
let [building, buildingUprns, planningData] = await Promise.all([
apiGet(`/api/buildings/${buildingId}.json${includeUserAttributes ? '?user_attributes=true' : ''}`),
apiGet(`/api/buildings/${buildingId}/uprns.json`)
apiGet(`/api/buildings/${buildingId}/uprns.json`),
apiGet(`/api/buildings/${buildingId}/planning_data.json`)
]);
building.uprns = buildingUprns.uprns;
building.planning_data = planningData.data; // TODO use planningData?
building = Object.assign(building, {...building.user_attributes});
delete building.user_attributes;

View File

@ -0,0 +1,87 @@
import React, { Fragment } from 'react';
import DataTitle from './data-title';
import InfoBox from '../../components/info-box';
import CheckboxDataEntry from '../data-components/checkbox-data-entry';
interface PlanningDataOfficialDataEntryProps {
value: any; // TODO: proper structuring!
}
const {useState} = React;
const LongText = ({ content,limit}) => {
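// renders content truncated to 'limit' characters, with a toggle between the shortened and full text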
const [showAll, setShowAll] = useState(false);
const showMore = () => setShowAll(true);
const showLess = () => setShowAll(false);
if (content.length <= limit) {
return <div>{content}</div>
}
if (showAll) {
return <div>
{content}
<b onClick={showLess}>shorten description</b>
</div>
}
const toShow = content.substring(0, limit).trim() + "... ";
return <div>
{toShow}
<b onClick={showMore}>show full description</b>
</div>
}
const PlanningDataOfficialDataEntry: React.FC<PlanningDataOfficialDataEntryProps> = (props) => {
const data = props.value || [];
if(data.length == 0) {
return (<Fragment>
<InfoBox type='success'>
<DataTitle
title={"Planning Application Status"}
tooltip={null}
/>
<div>Disclaimer: data is imported from the official source, but the Planning London DataHub is known to be incomplete.</div>
<b>No live planning data available currently for this building polygon via the Planning London DataHub.</b>
</InfoBox>
</Fragment>);
}
return (
<Fragment>
<InfoBox type='success'>
<Fragment>
<DataTitle
title={"Planning Application Status"}
tooltip={null}
/>
<div>Disclaimer: data is imported from the official source, but the Planning London DataHub is known to be incomplete.</div>
<b>Data source:</b> <a href={data[0]["data_source_link"]}>{data[0]["data_source"]}</a>
<br/>
<b>Planning application ID:</b> {data[0]["planning_application_id"]}
<br/>
<b>Most recent update by data provider:</b> {data[0]["last_synced_date"]}
<br/>
<b>Current planning application status for this site:</b> {data[0]["status"]}
<br/>
<b>Decision date</b>: {data[0]["decision_date"].toString()}
<br/>
<b>Brief Description of proposed work</b>: <LongText content = {data[0]["description"]} limit = {400}/>
<br/>
<CheckboxDataEntry
title="Show conservation area layer (Ian Hall dataset)"
slug="planning_recent_outcome"
value={null}
disabled={true}
/>
</Fragment>
</InfoBox>
</Fragment>
);
};
export default PlanningDataOfficialDataEntry;

View File

@ -8,17 +8,18 @@ import { DataEntryGroup } from '../data-components/data-entry-group';
import SelectDataEntry from '../data-components/select-data-entry';
import Verification from '../data-components/verification';
import withCopyEdit from '../data-container';
import PlanningDataOfficialDataEntry from '../data-components/planning-data-entry';
import { CategoryViewProps } from './category-view-props';
/**
* Planning view/edit section
*/
const PlanningView: React.FunctionComponent<CategoryViewProps> = (props) => (
<Fragment>
<InfoBox type='warning'>
This section is under development as part of the project CLPV Tool. For more details and progress <a href="https://github.com/colouring-cities/manual/wiki/G2.-Data-capture-(2).-Live-streaming-and-automated-methods">read here</a>.
</InfoBox>
<PlanningDataOfficialDataEntry
value={props.building.planning_data}
/>
<DataEntry
title={dataFields.planning_portal_link.title}
slug="planning_portal_link"

View File

@ -2,7 +2,7 @@ import React from 'react';
interface InfoBoxProps {
msg?: string;
type?: 'info' | 'warning'
type?: 'info' | 'warning' | 'success'
}
const InfoBox: React.FC<InfoBoxProps> = ({msg, children, type = 'info'}) => (

View File

@ -167,7 +167,21 @@ export const categoryMapsConfig: {[key in Category]: CategoryMapDefinition[]} =
}
}
],
[Category.Planning]: [{
[Category.Planning]: [
{
mapStyle: 'planning_applications_status',
legend: {
title: 'Planning applications',
elements: [
{ color: '#00ffff', text: 'Submitted' },
{ color: '#00ff00', text: 'Approved' },
{ color: '#ffff00', text: 'Appeal In Progress' },
{ color: '#ff0000', text: 'Refused' },
{ color: '#999999', text: 'Withdrawn' },
]
}
},
{
mapStyle: 'planning_combined',
legend: {
title: 'Designation/protection',

View File

@ -170,6 +170,14 @@ export const dataFields = { /* eslint-disable @typescript-eslint/camelcase */
example: [{uprn: "", parent_uprn: "" }, {uprn: "", parent_uprn: "" }],
},
planning_data: {
category: Category.Location,
title: "PLANNING DATA",
tooltip: "PLANNING DATA",
example: [{}],
},
ref_osm_id: {
category: Category.Location,
title: "OSM ID",

View File

@ -11,6 +11,7 @@ export type BuildingMapTileset = 'date_year' |
'community_local_significance_total' |
'community_expected_planning_application_total' |
'community_in_public_ownership' |
'planning_applications_status' |
'planning_combined' |
'sust_dec' |
'building_attachment_form' |

View File

@ -7,6 +7,7 @@ import serialize from 'serialize-javascript';
import {
getBuildingById,
getBuildingUPRNsById,
getBuildingPlanningDataById,
getLatestRevisionId,
getUserVerifiedAttributes
} from './api/services/building/base';
@ -33,13 +34,14 @@ const frontendRoute = asyncController(async (req: express.Request, res: express.
}
try {
let [user, building, uprns, userVerified, latestRevisionId] = await Promise.all([
let [user, building, uprns, planningData, userVerified, latestRevisionId] = await Promise.all([
userId ? getUserById(userId) : undefined,
isBuilding ? getBuildingById(
buildingId,
{ userDataOptions: userId ? { userId, userAttributes: true } : null }
) : undefined,
isBuilding ? getBuildingUPRNsById(buildingId) : undefined,
isBuilding ? getBuildingPlanningDataById(buildingId) : undefined,
(isBuilding && userId) ? getUserVerifiedAttributes(buildingId, userId) : {},
getLatestRevisionId()
]);
@ -53,6 +55,9 @@ const frontendRoute = asyncController(async (req: express.Request, res: express.
if (data.building != null) {
data.building.uprns = uprns;
}
if (data.building != null) {
data.building.planning_data = planningData;
}
data.latestRevisionId = latestRevisionId;
renderHTML(context, data, req, res);
} catch(error) {

View File

@ -136,6 +136,11 @@ const LAYER_QUERIES = {
WHERE
community_public_ownership IS NOT NULL
`,
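// polygons coloured by current planning application status: planning_data joined to building geometries via shared UPRNs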
planning_applications_status: `SELECT
buildings.geometry_id, building_properties.uprn, building_properties.building_id, planning_data.status AS status, planning_data.uprn
FROM building_properties
INNER JOIN planning_data ON building_properties.uprn = planning_data.uprn
INNER JOIN buildings ON building_properties.building_id = buildings.building_id`,
planning_combined: `
SELECT
geometry_id,

2
etl/planning_data/.gitignore vendored Normal file
View File

@ -0,0 +1,2 @@
*.json
*.sql

View File

@ -0,0 +1,33 @@
The following scripts should be scheduled to run regularly to load livestream data into the database.
```
# query the API to obtain data
python3 obtain_livestream_data.py > data.json
# load data into the Colouring London database
python3 load_into_database.py
# remove the tile cache for the planning_applications_status layer - note that the cache location depends on your configuration
rm /srv/colouring-london/tilecache/planning_applications_status/* -rf
```
As loading into the database expects environment variables to be set, one option for actually scheduling it in a cron job is something like
```
export $(cat ~/scripts/.env | xargs) && /usr/bin/python3 ~/colouring-london/etl/planning_data/load_into_database.py
```
with
```
~/scripts/.env
```
being in the following format
```
PGHOST=localhost
PGDATABASE=colouringlondondb
PGUSER=cldbadmin
PGPASSWORD=actualpassword
```
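For reference, a minimal crontab sketch chaining the steps above might look as follows; the schedule, repository paths and output location are assumptions to adapt to your deployment (`load_into_database.py` reads `data.json` from its own directory):
```
# assumed schedule: fetch at 03:00, load at 03:30, clear the tile cache at 03:45
0 3 * * * /usr/bin/python3 ~/colouring-london/etl/planning_data/obtain_livestream_data.py > ~/colouring-london/etl/planning_data/data.json
30 3 * * * export $(cat ~/scripts/.env | xargs) && /usr/bin/python3 ~/colouring-london/etl/planning_data/load_into_database.py
45 3 * * * rm -rf /srv/colouring-london/tilecache/planning_applications_status/*
```
Clearing the tile cache last ensures the planning_applications_status tiles are regenerated from the freshly loaded data.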

View File

@ -0,0 +1,109 @@
import json
import datetime
import psycopg2
import os
def get_connection():
return psycopg2.connect(
host=os.environ['PGHOST'],
dbname=os.environ['PGDATABASE'],
user=os.environ['PGUSER'],
password=os.environ['PGPASSWORD']
)
def filepath():
return os.path.dirname(os.path.realpath(__file__)) + os.sep + "data.json"
def insert_entry(connection, e):
    # psycopg2 parameter binding converts a None application_url into SQL NULL,
    # so no manual quoting is needed here
    with connection.cursor() as cur:
        cur.execute('''INSERT INTO
        planning_data (planning_application_id, planning_application_link, description, decision_date, last_synced_date, status, data_source, data_source_link, uprn)
        VALUES
        (%s, %s, %s, %s, %s, %s, %s, %s, %s)
        ''', (e["application_id"], e["application_url"], e["description"], e["decision_date"], e["last_synced_date"], e["status"], e["data_source"], e["data_source_link"], e["uprn"]))
    connection.commit()
def parse_date_string_into_datestring(incoming):
date = None
try:
date = datetime.datetime.strptime(incoming, "%d/%m/%Y") # '21/07/2022'
except ValueError:
date = datetime.datetime.strptime(incoming, "%Y-%m-%dT%H:%M:%S.%fZ") # '2022-08-08T20:07:22.238Z'
return datetime.datetime.strftime(date, "%Y-%m-%d")
def shorten_description(original_description):
    description = original_description.strip()
    limit = 400
    if len(description) > limit:
        # rebuild the text word by word and stop before the limit is exceeded
        description = ""
        for word in original_description.split():
            extended = description
            if extended != "":
                extended += " "
            extended += word
            if len(extended) > limit:
                break
            description = extended
        if description == "":
            # the first word alone exceeds the limit, so fall back to a hard cut
            description = original_description.strip()[0:limit]
        description += "... <i>(show more)</i>"
    return description
def main():
connection = get_connection()
with connection.cursor() as cur:
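# the planning_data table is rebuilt from scratch on every run, so clear it before loading fresh data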
cur.execute("TRUNCATE planning_data")
with open(filepath(), 'r') as content_file:
data = json.load(content_file)
if data['rawResponse']['timed_out']:
raise Exception("query getting livestream data has failed")
if data['is_partial']:
raise Exception("query getting livestream data has failed")
if data['is_running']:
raise Exception("query getting livestream data has failed")
for entry in data['rawResponse']['hits']['hits']:
description = shorten_description(entry['_source']['description'])
application_id = entry['_source']['id']
decision_date = parse_date_string_into_datestring(entry['_source']['decision_date'])
last_synced_date = parse_date_string_into_datestring(entry['_source']['last_synced'])
uprn = entry['_source']['uprn']
status = entry['_source']['status']
if status in ["No Objection to Proposal (OBS only)", "Not Required", None, "Lapsed", "Unknown", "SECS", "Comment Issued"]:
continue
if status == "Refused":
status = "Rejected"
if status == "Appeal Received":
status = "Appeal In Progress"
if (status not in ["Approved", "Rejected", "Appeal In Progress", "Withdrawn", ]):
raise Exception("Unexpected status " + status)
description = entry['_source']['description'].strip()
if uprn is None:
continue
entry = {
"description": description,
"decision_date": decision_date,
"last_synced_date": last_synced_date,
"application_id": application_id,
"application_url": entry['_source']['url_planning_app'],
"uprn": uprn,
"status": status,
"data_source": "The Planning London DataHub Greater London Authority",
"data_source_link": "https://data.london.gov.uk/dataset/planning-london-datahub?_gl=1%2aprwpc%2a_ga%2aMzQyOTg0MjcxLjE2NTk0NDA4NTM", # TODO test
}
insert_entry(connection, entry)
if __name__ == '__main__':
main()

View File

@ -0,0 +1,55 @@
from requests.structures import CaseInsensitiveDict
import requests
import time
import json
def obtain_data(data):
url = "https://planningdata.london.gov.uk/dashboard/internal/search/es"
headers = headers_of_query()
response = requests.post(url, headers=headers, data=json.dumps(data))
# the initial response typically looks something like this:
# {'id': 'Fmo0RW9DX0k5U3UtLWJIVlEtMzRwR3cfdGwtYkJaaHNUeG1GdF9kRHFtQldaUToxODczMzM5Nw==', 'is_partial': True, 'is_running': True, 'rawResponse': {'took': 100, 'timed_out': False, 'terminated_early': False, 'num_reduce_phases': 0, '_shards': {'total': 1, 'successful': 0, 'skipped': 0, 'failed': 0}, 'hits': {'total': 0, 'max_score': None, 'hits': []}}, 'total': 1, 'loaded': 0}
if response.status_code != 200:
raise Exception("unexpected status code " + str(response.status_code))
output = response.content.decode('utf-8')
output = json.loads(output)
if output["is_partial"]:
identifier = output["id"]
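# keep re-requesting the partial async search result by its id until it completes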
while output["is_partial"]:
time.sleep(3)
response = reask_for_query_results(identifier)
output = json.loads(response.content.decode('utf-8'))
if response.status_code != 200:
raise Exception("unexpected status code " +
str(response.status_code))
return output
def headers_of_query():
headers = CaseInsensitiveDict()
headers["User-Agent"] = "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:93.0) Gecko/20100101 Firefox/93.0"
headers["Accept"] = "*/*"
headers["Referer"] = "https://planningdata.london.gov.uk/dashboard/app/discover"
headers["Content-Type"] = "application/json"
headers["kbn-version"] = "7.9.3"
headers["Origin"] = "https://planningdata.london.gov.uk"
headers["Connection"] = "keep-alive"
headers["Sec-Fetch-Dest"] = "empty"
headers["Sec-Fetch-Mode"] = "cors"
headers["Sec-Fetch-Site"] = "same-origin"
headers["TE"] = "trailers"
return headers
def reask_for_query_results(identifier):
data = {'id': identifier}
return requests.post(
'https://planningdata.london.gov.uk/dashboard/internal/search/es',
data=data,
timeout=100000,
headers={
'User-Agent': 'Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:93.0) Gecko/20100101 Firefox/93.0',
'kbn-version': '7.9.3',
}
)

View File

@ -0,0 +1,87 @@
import json
import jsbeautifier
import make_query
def main():
output = make_query.obtain_data(get_query())
# print(json.dumps(output))
opts = jsbeautifier.default_options()
opts.indent_size = 2
print(jsbeautifier.beautify(json.dumps(output), opts))
def get_query():
true = True # makes it possible to copy JSON into Python code
return {
"params": {
"ignoreThrottled": true,
"index": "applications",
"body": {
"version": true,
"size": 500,
"sort": [
{
"last_updated": {
"order": "desc",
"unmapped_type": "boolean"
}
}
],
"aggs": {
"2": {
"date_histogram": {
"field": "last_updated",
"calendar_interval": "1d",
"time_zone": "Europe/Warsaw",
"min_doc_count": 1
}
}
},
"stored_fields": [
"*"
],
"script_fields": {},
"docvalue_fields": [],
"_source": {
"excludes": []
},
"query": {
"bool": {
"must": [],
"filter": [
{
"range": {
"decision_date": {
"gte": "1922-01-01T00:00:00.000Z",
"format": "strict_date_optional_time"
}
}
}
],
"should": [],
"must_not": []
}
},
"highlight": {
"pre_tags": [
"@kibana-highlighted-field@"
],
"post_tags": [
"@/kibana-highlighted-field@"
],
"fields": {
"*": {}
},
"fragment_size": 2147483647
}
},
"rest_total_hits_as_int": true,
"ignore_unavailable": true,
"ignore_throttled": true,
"timeout": "30000ms"
}
}
if __name__ == '__main__':
main()

View File

@ -0,0 +1 @@
DROP TABLE IF EXISTS planning_data;

View File

@ -0,0 +1,15 @@
CREATE TABLE IF NOT EXISTS planning_data (
-- internal unique id
planning_entry_id serial PRIMARY KEY,
-- assigned by planning authority
planning_application_id VARCHAR(50),
planning_application_link VARCHAR(260),
description VARCHAR,
decision_date date,
last_synced_date date,
status VARCHAR(20),
data_source VARCHAR(70),
data_source_link VARCHAR(150),
uprn bigint
);