Fix serialization, changing it to wrap MapModel (which has the additional census/POI fields) instead. #17

Not yet updating the CLI...
dabreegster committed Sep 1, 2024
1 parent 29cd7dc commit 99becd8
Showing 9 changed files with 79 additions and 94 deletions.
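The gist of the change: MapModel (the graph plus the census zones and POI amenities) now derives Serialize/Deserialize, so one bincode blob carries everything, instead of bincoding a bare Graph and rebuilding the rest on load. A minimal sketch of the write side, matching the bincode calls that appear in main.rs below; the wrapper function and path are illustrative, not part of the commit:

```rust
use std::fs::File;
use std::io::BufWriter;

use backend::MapModel;

// Serialize the whole model -- graph, zones, amenities -- as one file.
fn write_model(model: &MapModel, path: &str) -> anyhow::Result<()> {
    let writer = BufWriter::new(File::create(path)?);
    bincode::serialize_into(writer, model)?;
    Ok(())
}
```

This is also why Zones, Zone, ZoneID, Amenities, Amenity and AmenityID all gain serde derives in the diffs below.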
5 changes: 4 additions & 1 deletion backend/src/amenity.rs
@@ -6,15 +6,18 @@ use geo::{Coord, Point};
use geojson::{Feature, GeoJson, Geometry};
use graph::{Graph, Mode, Timer};
use osm_reader::OsmID;
use serde::{Deserialize, Serialize};
use utils::{Mercator, Tags};

#[derive(Serialize, Deserialize)]
pub struct Amenities {
pub amenities: Vec<Amenity>,
// Indexed by RoadID. These're broken down this way because the 3 graphs look different and
// could snap to different roads in each case
pub per_road: Vec<EnumMap<Mode, Vec<AmenityID>>>,
}

#[derive(Serialize, Deserialize)]
pub struct Amenity {
pub id: AmenityID,
pub osm_id: OsmID,
@@ -27,7 +30,7 @@ pub struct Amenity {
pub cuisine: Option<String>,
}

#[derive(Clone, Copy, Debug, Eq, Hash, PartialEq, PartialOrd, Ord)]
#[derive(Clone, Copy, Debug, Eq, Hash, PartialEq, PartialOrd, Ord, Serialize, Deserialize)]
pub struct AmenityID(pub usize);

impl Amenities {
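The per_road field above maps each road, per mode, to the amenities snapped to it. A small lookup sketch under that assumption; the helper, the usize index, and the Foot variant are illustrative guesses about the graph crate's types, not code from this commit:

```rust
use graph::Mode;

// Hypothetical helper: amenities snapped to one road for walking.
// Assumes per_road is indexed by the numeric part of a RoadID, as the
// struct comment says, and that Mode has a Foot variant.
fn amenities_on_road(amenities: &Amenities, road: usize) -> &[AmenityID] {
    &amenities.per_road[road][Mode::Foot]
}
```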
72 changes: 32 additions & 40 deletions backend/src/lib.rs
@@ -9,7 +9,7 @@ use chrono::NaiveTime;
use geo::{Coord, LineString};
use geojson::{de::deserialize_geometry, Feature, GeoJson, Geometry};
use graph::{Graph, Mode, Timer};
use serde::Deserialize;
use serde::{Deserialize, Serialize};
use wasm_bindgen::prelude::*;

use crate::amenity::Amenities;
@@ -25,6 +25,7 @@ static START: Once = Once::new();

// TODO Rename
#[wasm_bindgen]
#[derive(Serialize, Deserialize)]
pub struct MapModel {
graph: Graph,
zones: Zones,
@@ -33,12 +34,9 @@ pub struct MapModel {

#[wasm_bindgen]
impl MapModel {
/// If is_osm is true, expect bytes of an osm.pbf or osm.xml string. Otherwise, expect a
/// bincoded graph
#[wasm_bindgen(constructor)]
pub async fn new(
input_bytes: &[u8],
is_osm: bool,
gtfs_url: Option<String>,
population_url: Option<String>,
progress_cb: Option<js_sys::Function>,
@@ -50,40 +48,17 @@ impl MapModel {
});

let mut timer = Timer::new("build graph", progress_cb);
let mut amenities = Amenities::new();
let modify_roads = |_roads: &mut Vec<graph::Road>| {};
let graph = if is_osm {
let mut graph = Graph::new(input_bytes, &mut amenities, modify_roads, &mut timer)
.map_err(err_to_js)?;

graph
.setup_gtfs(
match gtfs_url {
Some(url) => graph::GtfsSource::Geomedea(url),
None => graph::GtfsSource::None,
},
&mut timer,
)
.await
.map_err(err_to_js)?;

graph
} else {
bincode::deserialize_from(input_bytes).map_err(err_to_js)?
};
amenities.finalize(&graph, &mut timer);
// TODO Serialize this too
let zones = Zones::load(population_url, &graph.mercator, &mut timer)
let model = MapModel::create(input_bytes, gtfs_url, population_url, &mut timer)
.await
.map_err(err_to_js)?;

timer.done();

Ok(MapModel {
graph,
zones,
amenities,
})
Ok(model)
}

#[wasm_bindgen(js_name = loadFile)]
pub fn load_file(input_bytes: &[u8]) -> Result<MapModel, JsValue> {
bincode::deserialize_from(input_bytes).map_err(err_to_js)
}

/// Returns a GeoJSON string. Just shows the full network
@@ -242,16 +217,33 @@ impl MapModel {
}
}

// Non WASM methods
// TODO Reconsider these. Benchmark should use Graph. MapModel should just be a thin WASM layer.
// Non WASM methods, also used by the CLI
impl MapModel {
pub fn from_graph_bytes(input_bytes: &[u8]) -> Result<MapModel, JsValue> {
let graph = bincode::deserialize_from(input_bytes).map_err(err_to_js)?;
pub async fn create(
input_bytes: &[u8],
gtfs_url: Option<String>,
population_url: Option<String>,
timer: &mut Timer,
) -> anyhow::Result<MapModel> {
let mut amenities = Amenities::new();
amenities.finalize(&graph, &mut Timer::new("deserialize graph", None));
let modify_roads = |_roads: &mut Vec<graph::Road>| {};
let mut graph = Graph::new(input_bytes, &mut amenities, modify_roads, timer)?;

graph
.setup_gtfs(
match gtfs_url {
Some(url) => graph::GtfsSource::Geomedea(url),
None => graph::GtfsSource::None,
},
timer,
)
.await?;
amenities.finalize(&graph, timer);
let zones = Zones::load(population_url, &graph.mercator, timer).await?;

Ok(MapModel {
graph,
zones: Zones::empty(),
zones,
amenities,
})
}
51 changes: 27 additions & 24 deletions backend/src/main.rs
@@ -3,48 +3,51 @@ use std::io::BufWriter;

use anyhow::Result;

use backend::{Graph, GtfsSource, Timer};
use backend::MapModel;
use graph::{GtfsModel, Timer};

// TODO Don't need tokio multithreading, but fighting config to get single working
/// This is a CLI tool to build a Graph file with GTFS data, for later use in the web app. This
/// gives a perf benefit (faster to load a pre-built graph) and allows GTFS use (no practical way
/// to read a huge GTFS file or clip it from web). The downside is having to manually manage these
/// prebuilt files as the format changes -- which is why, unlike in A/B Street, this'll just be for
/// manual testing for now.
/// This is a CLI tool to build a MapModel file, for later use in the web app or CLI. This gives a
/// perf benefit (faster to load a pre-built graph), but manually managing these prebuilt files as
/// the format changes is tedious. That's why, unlike in A/B Street, this'll just be for manual
/// testing for now.
#[tokio::main]
async fn main() -> Result<()> {
simple_logger::init_with_level(log::Level::Info).unwrap();

let args: Vec<String> = std::env::args().collect();
if args.len() < 3 || (args[1] != "graph" && args[1] != "gmd") {
if args.len() != 3 || (args[1] != "graph" && args[1] != "gmd") {
println!("Usage: one of these:");
println!("To make a graph.bin: graph osm.pbf [gtfs_directory]");
println!("To make a graph.bin: graph osm.pbf");
println!("To make a gtfs.gmd: gmd gtfs_directory");
std::process::exit(1);
}

// TODO Hardcoded for now
let population_url = Some("https://assets.od2net.org/population.fgb".to_string());

if args[1] == "graph" {
let timer = Timer::new("build graph", None);
let osm_bytes = std::fs::read(&args[2])?;
let gtfs = match args.get(3) {
Some(dir) => GtfsSource::Dir(dir.to_string()),
None => GtfsSource::None,
};

let graph = Graph::new(&osm_bytes, gtfs, population_url, timer).await?;
let writer = BufWriter::new(File::create("graph.bin")?);
bincode::serialize_into(writer, &graph)?;
} else if args[1] == "gmd" {
if args[1] == "gmd" {
let mut timer = Timer::new("build geomedea from gtfs", None);
timer.step("parse GTFS");
let model = backend::GtfsModel::parse(&args[2], None)?;
let model = GtfsModel::parse(&args[2], None)?;
timer.step("turn into geomedea");
model.to_geomedea("gtfs.gmd")?;
timer.done();
return Ok(());
}

let mut timer = Timer::new("build model", None);
let osm_bytes = std::fs::read(&args[2])?;
let model = MapModel::create(
&osm_bytes,
// TODO Hardcoded, or could we read from local files at least?
Some("https://assets.od2net.org/gtfs.gmd".to_string()),
Some("https://assets.od2net.org/population.fgb".to_string()),
&mut timer,
)
.await?;

timer.step("Writing");
let writer = BufWriter::new(File::create("model.bin")?);
bincode::serialize_into(writer, &model)?;

timer.done();
Ok(())
}
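The doc comment explains that model.bin is meant for later use in the web app or CLI; the web side loads it through loadFile, but per the commit message the CLI consumers aren't updated yet. A sketch of what a native consumer (say, a future benchmark) might do, mirroring load_file in lib.rs; the function and path handling are illustrative:

```rust
use backend::MapModel;

// Read the file the CLI just wrote and deserialize the whole MapModel.
fn load_prebuilt(path: &str) -> anyhow::Result<MapModel> {
    let bytes = std::fs::read(path)?;
    Ok(bincode::deserialize(&bytes)?)
}
```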
13 changes: 4 additions & 9 deletions backend/src/zone.rs
@@ -3,18 +3,21 @@ use flatgeobuf::{FeatureProperties, FgbFeature, GeozeroGeometry, HttpFgbReader};
use geo::{Area, MultiPolygon, Polygon};
use geojson::{Feature, FeatureCollection, Geometry};
use rstar::{primitives::GeomWithData, RTree};
use serde::{Deserialize, Serialize};
use utils::Mercator;

use crate::Timer;

#[derive(Serialize, Deserialize)]
pub struct Zones {
pub zones: Vec<Zone>,
pub rtree: RTree<GeomWithData<Polygon, ZoneID>>,
}

#[derive(Clone, Copy, Debug, Eq, Hash, PartialEq, PartialOrd, Ord)]
#[derive(Clone, Copy, Debug, Eq, Hash, PartialEq, PartialOrd, Ord, Serialize, Deserialize)]
pub struct ZoneID(pub usize);

#[derive(Serialize, Deserialize)]
pub struct Zone {
// TODO Maybe split these upfront, including area and population, and just store in the RTree?
// TODO Do these need to be Mercator?
@@ -25,14 +28,6 @@ pub struct Zone {
}

impl Zones {
// TODO Remove when benchmarks don't use WASM layer
pub fn empty() -> Self {
Self {
zones: Vec::new(),
rtree: RTree::new(),
}
}

pub async fn load(
population_url: Option<String>,
mercator: &Mercator,
4 changes: 2 additions & 2 deletions graph/src/lib.rs
@@ -23,8 +23,8 @@ use utils::{Mercator, Tags};
pub use self::route::Route;
use self::route::Router;
pub use self::timer::Timer;
use crate::gtfs::TripID;
use crate::gtfs::{GtfsModel, StopID};
pub use crate::gtfs::GtfsModel;
use crate::gtfs::{StopID, TripID};

#[derive(Serialize, Deserialize)]
pub struct Graph {
2 changes: 1 addition & 1 deletion web/src/RouteMode.svelte
@@ -139,7 +139,7 @@
<label>
<input type="checkbox" bind:checked={$showRouteBuffer} />
Buffer around route (minutes)
<input type="number" bind:value={$bufferMins} min="1" max="30" />
<input type="number" bind:value={$bufferMins} min="1" max="60" />
</label>
{#if $showRouteBuffer}
<label>
2 changes: 1 addition & 1 deletion web/src/UploadRouteMode.svelte
@@ -84,7 +84,7 @@
<label>
<input type="checkbox" bind:checked={$showRouteBuffer} />
Buffer around route (minutes)
<input type="number" bind:value={$bufferMins} min="1" max="30" />
<input type="number" bind:value={$bufferMins} min="1" max="60" />
</label>
{#if $showRouteBuffer}
<label>
12 changes: 6 additions & 6 deletions web/src/title/MapLoader.svelte
@@ -41,13 +41,13 @@
loading = [];
}
let graphFileInput: HTMLInputElement;
async function loadGraphFile(e: Event) {
let modelFileInput: HTMLInputElement;
async function loadModelFile(e: Event) {
try {
loading = ["Loading pre-built file"];
let buffer = await graphFileInput.files![0].arrayBuffer();
let buffer = await modelFileInput.files![0].arrayBuffer();
console.time("load");
await $backend!.loadGraphFile(new Uint8Array(buffer));
await $backend!.loadModelFile(new Uint8Array(buffer));
console.timeEnd("load");
$isLoaded = true;
@@ -157,7 +157,7 @@

<div>
<label>
Load a pre-built graph.bin file:
<input bind:this={graphFileInput} on:change={loadGraphFile} type="file" />
Load a pre-built model.bin file:
<input bind:this={modelFileInput} on:change={loadModelFile} type="file" />
</label>
</div>
12 changes: 2 additions & 10 deletions web/src/worker.ts
@@ -27,25 +27,17 @@ export class Backend {

this.inner = await new MapModel(
osmBytes,
true,
gtfsUrl,
populationUrl,
progressCb,
);
}

async loadGraphFile(graphBytes: Uint8Array) {
async loadModelFile(graphBytes: Uint8Array) {
// TODO Do we need to do this only once?
await init();

// No progress worth reporting for this
this.inner = await new MapModel(
graphBytes,
false,
undefined,
undefined,
undefined,
);
this.inner = await MapModel.loadFile(graphBytes);
}

isLoaded(): boolean {
