Skip to content

Commit

Permalink
Merge
Browse files Browse the repository at this point in the history
  • Loading branch information
janbaykara committed Dec 18, 2024
2 parents 6e109c9 + 2c77ff4 commit 0ba23ff
Show file tree
Hide file tree
Showing 27 changed files with 706 additions and 272 deletions.
2 changes: 1 addition & 1 deletion .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,7 @@ __pycache__
data/**/*
!data/areas.psql.zip
!data/.gitkeep
!data/preview_env_seed.zip
!data/areas.psql.zip
.next

conf/env
Expand Down
Original file line number Diff line number Diff line change
@@ -1,8 +1,8 @@
#!/bin/bash

if [ "$ENVIRONMENT" = "staging" ]; then
unzip data/preview_env_seed.zip -d data
cat data/preview_env_seed.psql | python manage.py dbshell
unzip data/areas.psql.zip -d data
cat data/areas.psql | python manage.py dbshell
else
echo "This command runs only in the staging environment."
fi
Binary file modified data/areas.psql.zip
Binary file not shown.
Binary file removed data/preview_env_seed.zip
Binary file not shown.
86 changes: 60 additions & 26 deletions hub/graphql/mutations.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,12 +4,12 @@
from enum import Enum
from typing import List, Optional

from django.utils.text import slugify
from django.utils.timezone import now

import strawberry
import strawberry_django
from asgiref.sync import async_to_sync
from graphql import GraphQLError
from strawberry import auto
from strawberry.field_extensions import InputMutationExtension
from strawberry.types.info import Info
Expand Down Expand Up @@ -133,12 +133,16 @@ class ExternalDataSourceAction:


@strawberry.mutation(extensions=[IsAuthenticated()])
async def trigger_update(external_data_source_id: str) -> ExternalDataSourceAction:
data_source = await models.ExternalDataSource.objects.aget(
id=external_data_source_id
async def trigger_update(
info: Info, external_data_source_id: str
) -> ExternalDataSourceAction:
data_source: models.ExternalDataSource = (
await models.ExternalDataSource.objects.aget(id=external_data_source_id)
)
# Use this ID to track all jobs against it
request_id = str(uuid.uuid4())

batch_request = await BatchRequest.objects.acreate(user=get_current_user(info))
request_id = str(batch_request.id)

await data_source.schedule_refresh_all(request_id=request_id)
return ExternalDataSourceAction(id=request_id, external_data_source=data_source)

Expand All @@ -164,40 +168,70 @@ def create_map_report(info: Info, data: MapReportInput) -> models.MapReport:

date_time_str = datetime.datetime.now().strftime("%Y-%m-%d %H:%M")

# Check for duplicate layers in existing reports
if "layers" in data.display_options:
display_layers = data.display_options["layers"]
for layer in display_layers:
layer_id = layer["id"]
# Query for existing report with the same layer
existing_report = models.MapReport.objects.filter(
layers__contains=[{"id": layer_id}]
).first()
if existing_report:
report_link = f"/reports/{existing_report.id}"
raise GraphQLError(
f"A map for this data source already exists. "
f'You can view it <a className="underline" href="{report_link}" target="_blank">here</a>'
)

# Prepare base parameters
params = {
**graphql_type_to_dict(data, delete_null_keys=True),
**{
"organisation": organisation,
"slug": data.slug or slugify(data.name),
"name": f"New map ({date_time_str})", # Default name for reports
"name": data.name or f"New map ({date_time_str})",
"display_options": data.display_options or {},
},
}

map_report = models.MapReport.objects.create(**params)
if existing_reports:
return map_report

# If this is the first report, add the user's first member list to it
member_list = (
model_types.ExternalDataSource.get_queryset(
models.ExternalDataSource.objects.get_queryset(),
info,
)
.filter(data_type=models.ExternalDataSource.DataSourceType.MEMBER)
.first()
)
if member_list:
map_report.name = f"Auto-generated report on {member_list.name}"
if "layers" in data.display_options:
display_layers = data.display_options["layers"]
map_report.layers = [
{
"id": str(uuid.uuid4()),
"name": member_list.name,
"source": str(member_list.id),
"visible": True,
"id": layer["id"],
"name": layer["name"],
"source": layer["source"],
"visible": layer["visible"],
"custom_marker_text": layer.get("custom_marker_text"),
}
for layer in display_layers
]
map_report.save()

map_report.save()
# If this is the first report, add the user's first member list to it
if not data.layers and not existing_reports:
member_list = (
model_types.ExternalDataSource.get_queryset(
models.ExternalDataSource.objects.get_queryset(),
info,
)
.filter(data_type=models.ExternalDataSource.DataSourceType.MEMBER)
.first()
)
if member_list:
map_report.name = f"Auto-generated report on {member_list.name}"
map_report.layers = [
{
"id": str(uuid.uuid4()),
"name": member_list.name,
"source": str(member_list.id),
"visible": True,
}
]
map_report.save()

return map_report


Expand Down
1 change: 1 addition & 0 deletions hub/graphql/types/model_types.py
Original file line number Diff line number Diff line change
Expand Up @@ -598,6 +598,7 @@ class GroupedData:
gss: Optional[str]
area_data: Optional[strawberry.Private[Area]] = None
imported_data: Optional[JSON] = None
area_type_filter: Optional["AreaTypeFilter"] = None

@strawberry_django.field
async def gss_area(self, info: Info) -> Optional[Area]:
Expand Down
108 changes: 108 additions & 0 deletions hub/management/commands/export_areas_as_sql.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,108 @@
from dataclasses import dataclass, field
from pathlib import Path
from typing import Any

from django.conf import settings

# from django postgis
from django.core.management.base import BaseCommand
from django.db import connection

from psycopg import Column


@dataclass
class TableConfig:
    """Per-table configuration for the SQL export.

    Controls which columns of a table are emitted in the generated INSERT
    statements and how foreign-key values are rewritten so the file can be
    imported into a database with different primary keys.
    """

    # Name of the database table to export (used verbatim in SELECT/INSERT).
    table_name: str
    # Column names to omit from the generated INSERT statements.
    exclude_columns: list[str] = field(default_factory=list)
    # Extra "name -> SQL expression" columns appended to the SELECT; they are
    # available to output_column_templates but never emitted in the INSERT.
    extra_select_columns: dict[str, str] = field(default_factory=dict)
    # "column -> SQL template" overrides; "{name}" placeholders are filled
    # with that row's values so the column is re-resolved at import time.
    output_column_templates: dict[str, str] = field(default_factory=dict)


# Tables to export, in dependency order: hub_areatype rows must be imported
# before the hub_area rows whose templates look them up by code.
TABLES = [
    TableConfig(table_name="hub_areatype"),
    TableConfig(
        table_name="hub_area",
        # Select each area's type *code* alongside the row so the template
        # below can resolve the foreign key portably.
        extra_select_columns={
            "area_type_code": "(SELECT code FROM hub_areatype WHERE hub_areatype.id = hub_area.area_type_id)"
        },
        # Re-resolve area_type_id in the target database by code, since
        # hub_areatype primary keys differ between environments.
        output_column_templates={
            "area_type_id": "(SELECT id FROM hub_areatype WHERE code = '{area_type_code}')"
        },
    ),
]


class Command(BaseCommand):
    help = """
    Export the area table from the database as an SQL file that can be imported into any environment
    without causing primary key conflicts.
    """

    def handle(self, *args, **options):
        """Write INSERT statements for every table in TABLES to data/areas.psql."""
        print("Exporting areas and area types from current database to data/areas.psql")
        count = 0
        output_file: Path = settings.BASE_DIR / "data" / "areas.psql"
        with output_file.open("w", encoding="utf8") as f:
            for table_config in TABLES:
                rows, columns = self.do_query(table_config)
                for row in rows:
                    output_record = self.get_output_record(row, columns, table_config)
                    column_names = ",".join(output_record.keys())
                    output_values = ",".join(output_record.values())
                    f.write(
                        f"INSERT INTO {table_config.table_name} ({column_names}) VALUES ({output_values});\n"
                    )
                    count += 1
        print(f"Exported {count} rows to data/areas.psql")

    def do_query(
        self, table_config: TableConfig
    ) -> tuple[list[tuple[Any, ...]], tuple[Column, ...]]:
        """SELECT * (plus any extra_select_columns) from the configured table.

        Returns the fetched rows and the cursor's column description.
        Rows are ordered by id so repeated exports are deterministic.
        """
        with connection.cursor() as cursor:
            select = "*"
            for column_name, column_select in table_config.extra_select_columns.items():
                select += f", {column_select} as {column_name}"
            cursor.execute(
                f"SELECT {select} FROM {table_config.table_name} ORDER BY id ASC"
            )
            rows = cursor.fetchall()
            columns = cursor.description
            return (rows, columns)

    def escape_sql_string(self, value: Any) -> str:
        """Escape a value for use inside a single-quoted SQL string literal.

        Returns the bare keyword "NULL" for None — callers must emit that
        WITHOUT surrounding quotes or it becomes the string 'NULL'.
        """
        if value is None:
            return "NULL"
        # Standard SQL escape: double any embedded single quotes.
        return str(value).replace("'", "''")

    def get_output_record(
        self, row: tuple[Any], columns: tuple[Column], table_config: TableConfig
    ) -> dict[str, str]:
        """Map column name -> SQL value expression for one fetched row."""
        record = {}
        for i, column in enumerate(columns):
            # Don't output ID columns or extra select columns (these can't be imported)
            if (
                column.name == "id"
                or column.name in table_config.extra_select_columns
                or column.name in table_config.exclude_columns
            ):
                continue

            if column.name in table_config.output_column_templates:
                template = table_config.output_column_templates[column.name]
                value = self.template_output_value(template, row, columns)
            elif row[i] is None:
                # Bug fix: emit a bare SQL NULL. Previously None was rendered
                # as the quoted, cast literal 'NULL'::<type>, which inserts the
                # string "NULL" rather than a NULL value.
                value = "NULL"
            else:
                # output the value as a string, cast to the correct type in postgres
                value = f"'{self.escape_sql_string(row[i])}'::{column.type_display}"

            record[column.name] = value
        return record

    def template_output_value(
        self, template: str, row: tuple[Any], columns: tuple[Column]
    ) -> str:
        """Fill "{column_name}" placeholders in *template* with the row's values.

        Bug fix: substituted values are quote-escaped, because the templates
        embed them inside single-quoted SQL string literals — a raw value
        containing "'" would previously have produced broken/injectable SQL.
        """
        value = template
        for i, column in enumerate(columns):
            value = value.replace(
                "{" + column.name + "}", self.escape_sql_string(row[i])
            )
        return value
96 changes: 54 additions & 42 deletions hub/management/commands/import_areas.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
import json
from time import sleep

# from django postgis
from django.contrib.gis.geos import GEOSGeometry, MultiPolygon, Polygon
Expand Down Expand Up @@ -36,7 +37,7 @@ class Command(BaseCommand):
"description": "District Council",
},
{
"mapit_type": ["COI", "CPW", "DIW", "LBW", "LGW", "MTW", "UTW"],
"mapit_type": ["COI", "CPW", "DIW", "LBW", "LGW", "MTW", "UTE", "UTW"],
"name": "Wards",
"code": "WD23",
"area_type": "Electoral Ward",
Expand All @@ -48,13 +49,17 @@ def add_arguments(self, parser):
parser.add_argument(
"-q", "--quiet", action="store_true", help="Silence progress bars."
)
parser.add_argument(
"-a",
"--all-names",
action="store_true",
help="Fetch alternative names from MapIt",
)

def handle(self, quiet: bool = False, *args, **options):
mapit_client = mapit.MapIt()
def handle(self, quiet: bool = False, all_names: bool = False, *args, **options):
self.mapit_client = mapit.MapIt()
for b_type in self.boundary_types:
areas = mapit_client.areas_of_type(
b_type["mapit_type"], {"min_generation": 10}
)
areas = self.mapit_client.areas_of_type(b_type["mapit_type"])
area_type, created = AreaType.objects.get_or_create(
name=b_type["name"],
code=b_type["code"],
Expand All @@ -66,42 +71,49 @@ def handle(self, quiet: bool = False, *args, **options):
print(f"Importing {b_type['name']} Areas")
for area in tqdm(areas, disable=quiet):
try:
geom = mapit_client.area_geometry(area["id"])
geom = {
"type": "Feature",
"geometry": geom,
"properties": {
"PCON13CD": area["codes"]["gss"],
"name": area["name"],
"type": b_type["code"],
"mapit_type": area["type"],
},
}
geom_str = json.dumps(geom)
except mapit.NotFoundException: # pragma: no cover
print(f"could not find mapit area for {area['name']}")
geom = None
self.import_area(area, area_type, all_names)
except mapit.RateLimitException:
print("Rate limited, sleeping for 3 minutes then retrying...")
sleep(180)
self.import_area(area, area_type)

a, created = Area.objects.update_or_create(
gss=area["codes"]["gss"],
area_type=area_type,
defaults={
"mapit_id": area["id"],
"name": area["name"],
"mapit_type": area["type"],
},
)
def import_area(self, area, area_type, all_names):
area_details = self.mapit_client.area_details(area["id"]) if all_names else {}
try:
geom = self.mapit_client.area_geometry(area["id"])
geom = {
"type": "Feature",
"geometry": geom,
"properties": {
"PCON13CD": area["codes"]["gss"],
"name": area["name"],
"type": area_type.code,
"mapit_type": area["type"],
},
}
geom_str = json.dumps(geom)
except mapit.NotFoundException: # pragma: no cover
print(f"could not find mapit area for {area['name']}")
geom = None

a, created = Area.objects.update_or_create(
gss=area["codes"]["gss"],
area_type=area_type,
defaults={
"mapit_id": area["id"],
"name": area["name"],
"mapit_type": area["type"],
"mapit_all_names": area_details.get("all_names"),
},
)

if geom is not None:
geos = json.dumps(geom["geometry"])
geom = GEOSGeometry(geos)
if isinstance(geom, Polygon):
geom = MultiPolygon([geom])
if geom is not None:
geos = json.dumps(geom["geometry"])
geom = GEOSGeometry(geos)
if isinstance(geom, Polygon):
geom = MultiPolygon([geom])

a.geometry = geom_str
a.polygon = geom
a.point = a.polygon.centroid
a.save()
except KeyError: # pragma: no cover
# Ignore areas without a GSS code
pass
a.geometry = geom_str
a.polygon = geom
a.point = a.polygon.centroid
a.save()
Loading

0 comments on commit 0ba23ff

Please sign in to comment.