diff --git a/.Rbuildignore b/.Rbuildignore index b974b9a..2ce238f 100644 --- a/.Rbuildignore +++ b/.Rbuildignore @@ -10,3 +10,4 @@ ^README\.Rmd$ ^ab_scenario\.json$ ^cran-comments\.md$ +^CRAN-RELEASE$ diff --git a/CRAN-RELEASE b/CRAN-RELEASE new file mode 100644 index 0000000..39498fe --- /dev/null +++ b/CRAN-RELEASE @@ -0,0 +1,2 @@ +This package was submitted to CRAN on 2021-08-18. +Once it is accepted, delete this file and tag the release (commit 6543bdc). diff --git a/DESCRIPTION b/DESCRIPTION index 460b830..7dcf4fa 100644 --- a/DESCRIPTION +++ b/DESCRIPTION @@ -1,7 +1,7 @@ Type: Package Package: abstr Title: R Interface to the A/B Street Transport System Simulation Software -Version: 0.3.0 +Version: 0.3.0.9000 Authors@R: c(person(given = "Nathanael", family = "Sheehan", diff --git a/NEWS.md b/NEWS.md index 8943ea4..772288f 100644 --- a/NEWS.md +++ b/NEWS.md @@ -1,3 +1,5 @@ +# abstr (development version) + # abstr 0.3.0 * New Vignette to reproduce the datasets based on the Montlake area in Seattle diff --git a/data-raw/exeter.r b/data-raw/exeter.r new file mode 100644 index 0000000..78f54cf --- /dev/null +++ b/data-raw/exeter.r @@ -0,0 +1,178 @@ +#### +#### +#### AIM: Use PCT data to create scenario of change where commuting cycling levels increase and car journeys decrease which can be imported +#### into A/B Street city simulation software. This method should be fully reproducible for all other pct_regions. +#### +#### + +#### LIBRARYS #### +library(pct) +library(sf) +library(tidyverse) +library(abstr) + +# for plotting +# library(tmap) +# library (mapview) + +#### READ DATA #### +devon_zones = pct::get_pct_zones(region = "devon", geography = "msoa") # get zone data +exeter_zones = devon_zones %>% filter(lad_name == "Exeter") %>% select(geo_code) # filter for exeter + +exeter_commute_od = pct::get_pct_lines(region = "devon", geography = "msoa") %>% # get commute od data + filter(lad_name1 == "Exeter" & + lad_name2 == "Exeter") # filter for exeter + + +#### CLEAN DATA , CALCULATE EUCLIDEAN DISTANCE & GENERATE SCENARIOS OF CHANGE #### +exeter_commute_od = exeter_commute_od %>% + mutate(cycle_base = bicycle) %>% + mutate(walk_base = foot) %>% + mutate(transit_base = bus + train_tube) %>% # bunch of renaming -_- + mutate(drive_base = car_driver + car_passenger + motorbike + taxi_other) %>% + mutate(all_base = all) %>% + mutate( + # create new columns + pcycle_godutch_uptake = pct::uptake_pct_godutch_2020(distance = rf_dist_km, gradient = rf_avslope_perc), + cycle_godutch_additional = pcycle_godutch_uptake * drive_base, + cycle_godutch = cycle_base + cycle_godutch_additional, + pcycle_godutch = cycle_godutch / all_base, + drive__godutch = drive_base - cycle_godutch_additional, + across(c(drive__godutch, cycle_godutch), round, 0), + all_go_dutch = drive__godutch + cycle_godutch + transit_base + walk_base + ) %>% + select( + # select variables for new df + geo_code1, + geo_code2, + cycle_base, + drive_base, + walk_base, + transit_base, + all_base, + all_go_dutch, + drive__godutch, + cycle_godutch, + cycle_godutch_additional, + pcycle_godutch + ) + +identical(exeter_commute_od$all_base, exeter_commute_od$all_go_dutch) # sanity check: make sure total remains the same (not a dynamic model where population change is factored in) + +#### DOWNLOAD OSM BUILDING DATA #### +osm_polygons = osmextract::oe_read( + "https://download.geofabrik.de/europe/great-britain/england/devon-latest.osm.pbf", + # download osm buildings for region using geofabrik + layer = "multipolygons" +) + +building_types = c( + 
"yes", + "house", + "detached", + "residential", + "apartments", + "commercial", + "retail", + "school", + "industrial", + "semidetached_house", + "church", + "hangar", + "mobile_home", + "warehouse", + "office", + "college", + "university", + "public", + "garages", + "cabin", + "hospital", + "dormitory", + "hotel", + "service", + "parking", + "manufactured", + "civic", + "farm", + "manufacturing", + "floating_home", + "government", + "bungalow", + "transportation", + "motel", + "manufacture", + "kindergarten", + "house_boat", + "sports_centre" +) +osm_buildings = osm_polygons %>% + dplyr::filter(building %in% building_types) %>% + dplyr::select(osm_way_id, name, building) + +osm_buildings_valid = osm_buildings[sf::st_is_valid(osm_buildings), ] + +exeter_osm_buildings_all = osm_buildings_valid[exeter_zones, ] + +#mapview(exeter_osm_buildings_all) + +# Filter down large objects for package ----------------------------------- +exeter_osm_buildings_all_joined = exeter_osm_buildings_all %>% + sf::st_join(exeter_zones) + +set.seed(2021) +exeter_osm_buildings_sample = exeter_osm_buildings_all_joined %>% + dplyr::filter(!is.na(osm_way_id)) + +exeter_osm_buildings_tbl = exeter_osm_buildings_all %>% + dplyr::filter(osm_way_id %in% exeter_osm_buildings_sample$osm_way_id) + + +#### LOGIC GATE #### +# Logic gate for go_dutch scenario of change, where cycling levels increase to a proportion reflecting the Netherlands. +#Switch to FALSE if you want census commuting OD +go_dutch = TRUE +if (go_dutch == TRUE) { + exeter_od = exeter_commute_od %>% + mutate(All = all_go_dutch) %>% + mutate(Bike = cycle_godutch) %>% + mutate(Transit = transit_base) %>% + mutate(Drive = drive_base) %>% + mutate(Walk = walk_base) %>% + select(geo_code1, geo_code2, All, Bike, Transit, Drive, Walk,geometry) +} else { + exeter_od = exeter_commute_od %>% + mutate(All = all_base) %>% + mutate(Bike = cycle_base) %>% + mutate(Drive = drive_base) %>% + mutate(Transit = transit_base) %>% + mutate(Walk = walk_base) %>% + select(geo_code1, geo_code2, All, Bike, Transit, Drive, Walk, geometry) +} + +#### GENERATE A/B STREET SCENARIO #### +output_sf = ab_scenario( + od = exeter_od, + zones = exeter_zones, + zones_d = NULL, + origin_buildings = exeter_osm_buildings_tbl, + destination_buildings = exeter_osm_buildings_tbl, + pop_var = 3, + time_fun = ab_time_normal, + output = "sf", + modes = c("Walk", "Bike", "Drive", "Transit") +) + +# make map using tmap +# tm_shape(output_sf) + tmap::tm_lines(col = "mode", lwd = .8, lwd.legeld.col = "black") + +# tm_shape(exeter_zones) + tmap::tm_borders(lwd = 1.2, col = "gray") + +# tm_text("geo_code", size = 0.6, col = "black") + +# tm_style("cobalt") + +#### SAVE JSON FILE #### +output_json = ab_json(output_sf, time_fun = ab_time_normal, scenario_name = "Exeter Example") +ab_save(output_json, f = "../../Desktop/exeter.json") + +#### COMMANDS FOR AB STREET +# $ cargo run +# $ cargo run --bin import_traffic -- --map=PATH/TO/MAP --input=/PATH/TO/JSON.json diff --git a/vignettes/abstr.Rmd b/vignettes/abstr.Rmd index a94e26b..fd7440d 100644 --- a/vignettes/abstr.Rmd +++ b/vignettes/abstr.Rmd @@ -1,13 +1,12 @@ --- -title: "Reproducing Montlake Eastside Seattle, US" +title: "Get started with abstr" output: rmarkdown::html_vignette vignette: > - %\VignetteIndexEntry{Reproducing Montlake Eastside Seattle, US} + %\VignetteIndexEntry{Get started with abstr} %\VignetteEngine{knitr::rmarkdown} %\VignetteEncoding{UTF-8} -author: -- "Robin Lovelace, Trevor Nederlof and Nathanael Sheehan" --- + ```{r, include = FALSE} 
knitr::opts_chunk$set( collapse = TRUE, @@ -15,172 +14,102 @@ knitr::opts_chunk$set( ) ``` -## Introduction -Lets start with the city A/B street began, Seattle. U.S. The seaport city is home to over 700,000 people, including A/B Street creator, Dustin Carlino, who has been developing tools to empirically study the impact of small changes within the road network: this means "you can transform that street parking into a bus lane or fix that pesky left turn at a traffic signal, measure the effects, then propose actually making the change". For the past two years, Seattle has been a key are of study within the A/B street simuverse and thus makes it a great starting point in understanding the utilities of `abstr` to generate site data for A/B street. - - - - - - -## Example -This example demonstrates how to wrangle the data for the three key components needsed to generate scenarios for A/B street. These components are: OD data, site zones and site buildings. With these components, the `abstr` package is ready to convert dataframes into simulations! Lets get started. - -### Load packages -```{r, eval = FALSE} -library(tidyverse) -library(sf) -library(abstr) -``` - +# Getting started -### Fetch Montlake Polygon +## Installing R -Now lets start with fetching the polygon area for Montlake. To be consistent with whats on A/B street currently, we can grab the official polygon from the github repo. Following this we can clean the data and convert it to WGS84. +To generate new scenario files to import into A/B Street with the `abstr` R package, you need to have installed a stable version of [Rstudio and R](https://www.rstudio.com/products/rstudio/). -```{r, eval = FALSE} -montlake_poly_url <- "https://raw.githubusercontent.com/a-b-street/abstreet/master/importer/config/us/seattle/montlake.poly" +## Installing A/B Street -raw_boundary_vec <- readr::read_lines(montlake_poly_url) -boundary_matrix <- raw_boundary_vec[(raw_boundary_vec != "boundary") & (raw_boundary_vec != "1") & (raw_boundary_vec != "END")] %>% - stringr::str_trim() %>% - tibble::as_tibble() %>% - dplyr::mutate(y_boundary = as.numeric(lapply(stringr::str_split(value, " "), `[[`, 1)), - x_boundary = as.numeric(lapply(stringr::str_split(value, " "), `[[`, 2))) %>% - dplyr::select(-value) %>% - as.matrix() -boundary_sf_poly <- sf::st_sf(geometry = sf::st_sfc(sf::st_polygon(list(boundary_matrix)), crs = 4326)) +Todo... -``` +## Installing Rust and the development version of A/B Street -### Parsing zones +- Install a stable version of [Rust](https://www.rust-lang.org/tools/install) -Next, we fetch zone data for the Seattle district, this comes from soundcast and needs to be parsed based on our polygon boundary. + - On Windows, you will also need [Visual Code Studio](https://code.visualstudio.com/) and [Visual Studio c++ build tools](https://visualstudio.microsoft.com/downloads/) prior to installing Rust. -```{r, eval = FALSE} -all_zones_tbl <- sf::st_read("https://raw.githubusercontent.com/psrc/soundcast/master/inputs/base_year/taz2010.geojson") %>% sf::st_transform(4326) -zones_in_boundary_tbl <- all_zones_tbl[sf::st_intersects(all_zones_tbl, boundary_sf_poly, sparse = F),] -``` +- On Linux, run `sudo apt-get install libasound2-dev libxcb-shape0-dev libxcb-xfixes0-dev libpango1.0-dev libgtk-3-dev` or the equivalent for your distribution. -### Generate OD Matrix and zones table - -Now we need to get some OD data into the mix. Finding this data for some cities can be tricky, luckily soundcast provides granular data for trips in Seattle for 2014. 
This data is then converted to an OD matrix and is filtered by trips that start or finish in Montlake zones. Furthermore, the data is then transformed into a wide format and filtered to only include OD entries with greater than 25 trips. Voila, the OD data is ready to go. The OD data is then parsed against all zones in the montlake area. - -```{r, eval = FALSE} -## process the disagreggated soundcast trips data -all_trips_tbl <- readr::read_csv("http://abstreet.s3-website.us-east-2.amazonaws.com/dev/data/input/us/seattle/trips_2014.csv.gz") - -## create a OD matrix -od_tbl_long <- dplyr::select(all_trips_tbl, otaz, dtaz, mode) %>% - dplyr::mutate(mode = dplyr::case_when(mode %in% c(1, 9) ~ "Walk", - mode == 2 ~ "Bike", - mode %in% c(3, 4, 5) ~ "Drive", - mode %in% c(6, 7, 8) ~ "Transit", - TRUE ~ as.character(NA))) %>% - dplyr::filter(!is.na(mode)) %>% - dplyr::group_by(otaz, dtaz, mode) %>% - dplyr::summarize(n = n()) %>% - dplyr::ungroup() %>% - # only keep an entry if the origin or destination is in a Montlake zone - dplyr::filter((otaz %in% zones_in_boundary_tbl$TAZ) | (dtaz %in% zones_in_boundary_tbl$TAZ)) - -# create a wide OD matrix and filter out any OD entries with under 25 trips in it -montlake_od_tbl <- tidyr::pivot_wider(od_tbl_long, names_from = mode, values_from = n, values_fill = 0) %>% - dplyr::rename(o_id = otaz, d_id = dtaz) %>% - dplyr::mutate(total = Drive + Transit + Bike + Walk) %>% - dplyr::filter(total >= 25) %>% - dplyr::select(-total) - -montlake_zone_tbl <- dplyr::right_join(all_zones_tbl, - tibble::tibble("TAZ" = unique(c(montlake_od_tbl$o_id, montlake_od_tbl$d_id))), - by = "TAZ") %>% - dplyr::select(TAZ) %>% - dplyr::rename(id = TAZ) -``` +- Download the A/B Street repo `git clone https://github.com/a-b-street/abstreet.git` -### Fetching OSM building data +- Fetch the minimal amount of data needed to get started `cargo run --bin updater -- --minimal` -A/B street functions by generating buildings based on OSM entries, luckily the `osmextract` makes this an easy process in R. OSM buildings must be valid `sf` objects so that they can be parsed against the zone areas. To speed things up, the later part of this chunk selects 20% of buildings in each zone. +The example below shows how `abstr` can be used. 
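Before running it, you will also need the `abstr` R package itself. A minimal sketch of the two usual install routes is shown below; it assumes the release described in this changeset has been accepted on CRAN and that the development version lives in the `a-b-street/abstr` GitHub repository referenced elsewhere in this diff:

```r
# Released version (assuming the CRAN submission has been accepted)
install.packages("abstr")

# Development version from GitHub, using the remotes package
# install.packages("remotes")
remotes::install_github("a-b-street/abstr")
```

Either route should make `library(abstr)` work in the chunks that follow.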
-```{r, eval = FALSE} -osm_polygons <- osmextract::oe_read("http://download.geofabrik.de/north-america/us/washington-latest.osm.pbf", layer = "multipolygons") +# Using `abstr` -building_types <- c("yes", "house", "detached", "residential", "apartments", - "commercial", "retail", "school", "industrial", "semidetached_house", - "church", "hangar", "mobile_home", "warehouse", "office", - "college", "university", "public", "garages", "cabin", "hospital", - "dormitory", "hotel", "service", "parking", "manufactured", - "civic", "farm", "manufacturing", "floating_home", "government", - "bungalow", "transportation", "motel", "manufacture", "kindergarten", - "house_boat", "sports_centre") -osm_buildings <- osm_polygons %>% - dplyr::filter(building %in% building_types) %>% - dplyr::select(osm_way_id, name, building) +Load the package as follows: -osm_buildings_valid <- osm_buildings[sf::st_is_valid(osm_buildings),] - -montlake_osm_buildings_all <- osm_buildings_valid[montlake_zone_tbl,] +```{r setup} +library(abstr) +``` -# # use to visualize the building data -# tmap::tm_shape(boundary_sf_poly) + tmap::tm_borders() + -# tmap::tm_shape(montlake_osm_buildings) + tmap::tm_polygons(col = "building") +The input datasets include `sf` objects representing buildings, origin-destination (OD) data represented as desire lines and administrative zones representing the areas within which trips in the desire lines start and end. +With the exception of OD data, each of the input datasets is readily available for most cities. +The input datasets are illustrated in the plots below, which show example data shipped in the package, taken from the Seattle, U.S. -# Filter down large objects for package ----------------------------------- -montlake_osm_buildings_all_joined <- montlake_osm_buildings_all %>% - sf::st_join(montlake_zone_tbl) +```{r input, fig.cap="Example data that can be used as an input by functions in abstr to generate trip-level scenarios that can be imported by A/B Street."} +library(abstr) +library(tmap) # for map making +tm_shape(montlake_zones) + tm_polygons(col = "grey") + + tm_shape(montlake_buildings) + tm_polygons(col = "blue") + +tm_style("classic") +``` -set.seed(2021) -# select 20% of buildings in each zone to reduce file size for this example -# remove this filter or increase the sampling to include more buildings -montlake_osm_buildings_sample <- montlake_osm_buildings_all_joined %>% - dplyr::filter(!is.na(osm_way_id)) %>% - sf::st_drop_geometry() %>% - dplyr::group_by(id) %>% - dplyr::sample_frac(0.20) %>% - dplyr::ungroup() +The map above is a graphical representation of the Montlake residential neighborhood in central Seattle, Washington. Here, `montlake_zones` represents neighborhood residential zones declared by Seattle local government and `montlake_buildings` being the accumulation of buildings listed in OpenStreetMap -montlake_osm_buildings_tbl <- montlake_osm_buildings_all %>% - dplyr::filter(osm_way_id %in% montlake_osm_buildings_sample$osm_way_id) +The final piece of the `abstr` puzzle is OD data. +```{r output-sf, message=FALSE, warning=FALSE} +head(montlake_od) ``` +In this example, the first two columns correspond to the origin and destination zones in Montlake, with the subsequent columns representing the transport mode share between these zones. -### Generate A/B Street scenarios using `abstr` +Let's combine each of the elements outlined above, the zone, building and OD data. 
+We do this using the `ab_scenario()` function in the `abstr` package, which generates a data frame representing travel between the `montlake_buildings`.
+While the OD data contains information on origin and destination zones, `ab_scenario()` 'disaggregates' the data and randomly selects buildings within each origin and destination zone to simulate travel at the individual level, as illustrated in the chunk below, which uses only a subset of the `montlake_od` data to illustrate the process:
-So now we are ready to generate simulation files. To do this, lets combine each of the elements outlined above, the zone (`montlake_zone_tbl`), building (`montlake_osm_buildings_tbl`) and OD (`montlake_od_tbl`) data. We do this using the ab_scenario() function in the abstr package, which generates a data frame representing travel between the montlake_buildings. While the OD data contains information on origin and destination zone, ab_scenario() ‘disaggregates’ the data and randomly selects building within each origin and destination zone to simulate travel at the individual level, as illustrated in the chunk below which uses only a sample of the montlake_od data, showing travel between three pairs of zones, to illustrate the process:
-```{r, eval = FALSE}
-# use subset of OD data for speed
+```{r message=FALSE,warning=FALSE}
 set.seed(42)
-montlake_od_minimal = montlake_od_tbl[sample(nrow(montlake_od_tbl), size = 3), ]
-
-output_sf <- ab_scenario(
+montlake_od_minimal = subset(montlake_od, o_id == "373" | o_id == "402" | o_id == "281" | o_id == "588" | o_id == "301" | o_id == "314")
+output_sf = ab_scenario(
   od = montlake_od_minimal,
-  zones = montlake_zone_tbl,
+  zones = montlake_zones,
   zones_d = NULL,
-  origin_buildings = montlake_osm_buildings_tbl,
-  destination_buildings = montlake_osm_buildings_tbl,
-  # destinations2 = NULL,
+  origin_buildings = montlake_buildings,
+  destination_buildings = montlake_buildings,
   pop_var = 3,
   time_fun = ab_time_normal,
   output = "sf",
-  modes = c("Walk", "Bike", "Drive", "Transit"))
+  modes = c("Walk", "Bike", "Drive", "Transit")
+)
+```
-
-# # visualize the results
-# tmap::tm_shape(res) + tmap::tm_lines(col="mode") +
-# tmap::tm_shape(montlake_zone_tbl) + tmap::tm_borders()
+The `output_sf` object created above can be further transformed to match [A/B Street's schema](https://a-b-street.github.io/docs/tech/dev/formats/scenarios.html) and visualised in A/B Street, or visualised in R (using the `tmap` package in the code chunk below):
-
-# build json output
-ab_save(ab_json(output_sf, time_fun = ab_time_normal,
-                scenario_name = "Montlake Example"),
-        f = "montlake_scenarios.json")
+```{r outputplot}
+tm_shape(output_sf) + tmap::tm_lines(col = "mode", lwd = .8, lwd.legeld.col = "black") +
+  tm_shape(montlake_zones) + tmap::tm_borders(lwd = 1.2, col = "gray") +
+  tm_text("id", size = 0.6) +
+tm_style("cobalt")
+```
+
+Each line in the plot above represents a single trip, with the color representing the transport mode. Moreover, each trip is configured with an associated departure time that can be represented in A/B Street.
+
+The `ab_save` and `ab_json` functions conclude the `abstr` workflow by outputting a local JSON file, matching [A/B Street's schema](https://a-b-street.github.io/docs/tech/dev/formats/scenarios.html).
+```{r message=FALSE, warning=FALSE}
+output_json = ab_json(output_sf, time_fun = ab_time_normal, scenario_name = "Montlake Example")
+ab_save(output_json, f = "montlake_scenarios.json")
+```
-
 Let's see what is in the file:
 
 ```r
-file.edit("montlake_scenarios.json")
+file.edit("montlake_scenarios.json")
 ```
 
 The first trip schedule should look something like this, matching [A/B Street's schema](https://a-b-street.github.io/docs/tech/dev/formats/).
@@ -212,7 +141,13 @@ The first trip schedule should look something like this, matching [A/B Street's
 }
 ```
 
-### Importing scenario files into A/B Street
+```{r, include=FALSE}
+# remove just generated .json file
+file.remove("montlake_scenarios.json")
+```
+
+# Importing scenario files into A/B Street
+![](https://user-images.githubusercontent.com/22789869/128907563-4aa95b30-a98d-4fbc-9275-97e0b30dd227.gif)
 
 In order to import scenario files into A/B Street, you will need to:
@@ -234,3 +169,37 @@ cargo run --bin game -- --dev data/system/us/seattle/maps/montlake.bin
 ```
 
 Once the game has booted up click on the `scenarios` tab in the top right, it will currently be set as "none". Change this to the first option "Montlake Example" which will be the scenario we have just uploaded. Alternatively, you can skip the first import command and use the GUI to select a scenario file to import.
+
+
+# Projects supporting `abstr`
+
+Several open source R packages enabled the creation of `abstr`.
+
+These include:
+
+- [jsonlite](https://cran.r-project.org/web/packages/jsonlite/index.html) for awesome JSON parsing and generation
+- [magrittr](https://cran.r-project.org/web/packages/magrittr/index.html) for magic `%>%` pipe-command chaining
+- [sf](https://cran.r-project.org/web/packages/sf/index.html) for simple feature support
+- [tibble](https://cran.r-project.org/web/packages/tibble/index.html) for strict data-frame declaration
+- [tidyr](https://cran.r-project.org/web/packages/tidyr/index.html) for all things data cleaning
+
+
+# Context
+
+To further understand the methods and motivations behind `abstr`, it helps to have some context.
+The package builds on the ecosystem of open source software for geospatial data ([FOSS4G](https://foss4g.org/)) and packages such as [`stplanr`](https://docs.ropensci.org/stplanr) for working with transport data in R.
+`stplanr` was developed to support development of the Propensity to Cycle Tool ([PCT](https://www.pct.bike/)), an open source transport planning system for England and Wales.
+The PCT [package](https://github.com/ITSLeeds/pct) enables access to the data generated by the PCT project, plus scenario-of-change modeling (not forecasting) at the origin-destination level, which can provide results at regional, local, route and route network levels.
+
+![](https://www.pct.bike/www/static/01_logos/pct-logo.png)
+
+The PCT provides a range of deterministic scenarios of change, such as `go_dutch` (where cycling levels match those of the Netherlands), `gender_eq` (where cycling levels are equal among women and men) and `gov_target` (where cycling levels reflect current UK government targets).
+An academic [paper](https://www.jtlu.org/index.php/jtlu/article/view/862) on the PCT provides further detail on the motivations for and methods underlying the project.
In 2018 the `stplanr` (sustainable transport planning) package and its accompanying R Journal [article](https://journal.r-project.org/archive/2018/RJ-2018-053/RJ-2018-053.pdf) came on the scene, providing functions for solving common problems in transport planning and modeling, as well as advocating transparency in the tools used for transport planning.

[![](https://raw.githubusercontent.com/ropensci/stplanr/master/man/figures/stplanr.png)](https://github.com/ropensci/stplanr)

Finally, on the R side, the `od` package was released in 2021, providing functions for working with origin-destination data.
A central focus in all of the packages and papers mentioned above is to provide open access transport tools to support data-driven transport policies based on an objective and transparent evidence base.

For more on the history motivating the development of the `abstr` package, see the [`pct_to_abstr` vignette]().

diff --git a/vignettes/montlake.Rmd b/vignettes/montlake.Rmd new file mode 100644 index 0000000..a94e26b --- /dev/null +++ b/vignettes/montlake.Rmd @@ -0,0 +1,236 @@
---
title: "Reproducing Montlake Eastside Seattle, US"
output: rmarkdown::html_vignette
vignette: >
  %\VignetteIndexEntry{Reproducing Montlake Eastside Seattle, US}
  %\VignetteEngine{knitr::rmarkdown}
  %\VignetteEncoding{UTF-8}
author:
- "Robin Lovelace, Trevor Nederlof and Nathanael Sheehan"
---
```{r, include = FALSE}
knitr::opts_chunk$set(
  collapse = TRUE,
  comment = "#>"
)
```

## Introduction
Let's start with the city where A/B Street began: Seattle, US. The seaport city is home to over 700,000 people, including A/B Street creator, Dustin Carlino, who has been developing tools to empirically study the impact of small changes within the road network: this means "you can transform that street parking into a bus lane or fix that pesky left turn at a traffic signal, measure the effects, then propose actually making the change". For the past two years, Seattle has been a key area of study within the A/B Street simuverse, which makes it a great starting point for understanding how `abstr` can be used to generate site data for A/B Street.

## Example
This example demonstrates how to wrangle the data for the three key components needed to generate scenarios for A/B Street. These components are: OD data, site zones and site buildings. With these components, the `abstr` package is ready to convert dataframes into simulations! Let's get started.

### Load packages
```{r, eval = FALSE}
library(tidyverse)
library(sf)
library(abstr)
```

### Fetch Montlake Polygon

Now let's start by fetching the polygon area for Montlake. To be consistent with what is in A/B Street currently, we can grab the official polygon from the GitHub repo. Following this, we can clean the data and convert it to WGS84.
+ +```{r, eval = FALSE} +montlake_poly_url <- "https://raw.githubusercontent.com/a-b-street/abstreet/master/importer/config/us/seattle/montlake.poly" + +raw_boundary_vec <- readr::read_lines(montlake_poly_url) +boundary_matrix <- raw_boundary_vec[(raw_boundary_vec != "boundary") & (raw_boundary_vec != "1") & (raw_boundary_vec != "END")] %>% + stringr::str_trim() %>% + tibble::as_tibble() %>% + dplyr::mutate(y_boundary = as.numeric(lapply(stringr::str_split(value, " "), `[[`, 1)), + x_boundary = as.numeric(lapply(stringr::str_split(value, " "), `[[`, 2))) %>% + dplyr::select(-value) %>% + as.matrix() +boundary_sf_poly <- sf::st_sf(geometry = sf::st_sfc(sf::st_polygon(list(boundary_matrix)), crs = 4326)) + +``` + +### Parsing zones + +Next, we fetch zone data for the Seattle district, this comes from soundcast and needs to be parsed based on our polygon boundary. + +```{r, eval = FALSE} +all_zones_tbl <- sf::st_read("https://raw.githubusercontent.com/psrc/soundcast/master/inputs/base_year/taz2010.geojson") %>% sf::st_transform(4326) +zones_in_boundary_tbl <- all_zones_tbl[sf::st_intersects(all_zones_tbl, boundary_sf_poly, sparse = F),] +``` + +### Generate OD Matrix and zones table + +Now we need to get some OD data into the mix. Finding this data for some cities can be tricky, luckily soundcast provides granular data for trips in Seattle for 2014. This data is then converted to an OD matrix and is filtered by trips that start or finish in Montlake zones. Furthermore, the data is then transformed into a wide format and filtered to only include OD entries with greater than 25 trips. Voila, the OD data is ready to go. The OD data is then parsed against all zones in the montlake area. + +```{r, eval = FALSE} +## process the disagreggated soundcast trips data +all_trips_tbl <- readr::read_csv("http://abstreet.s3-website.us-east-2.amazonaws.com/dev/data/input/us/seattle/trips_2014.csv.gz") + +## create a OD matrix +od_tbl_long <- dplyr::select(all_trips_tbl, otaz, dtaz, mode) %>% + dplyr::mutate(mode = dplyr::case_when(mode %in% c(1, 9) ~ "Walk", + mode == 2 ~ "Bike", + mode %in% c(3, 4, 5) ~ "Drive", + mode %in% c(6, 7, 8) ~ "Transit", + TRUE ~ as.character(NA))) %>% + dplyr::filter(!is.na(mode)) %>% + dplyr::group_by(otaz, dtaz, mode) %>% + dplyr::summarize(n = n()) %>% + dplyr::ungroup() %>% + # only keep an entry if the origin or destination is in a Montlake zone + dplyr::filter((otaz %in% zones_in_boundary_tbl$TAZ) | (dtaz %in% zones_in_boundary_tbl$TAZ)) + +# create a wide OD matrix and filter out any OD entries with under 25 trips in it +montlake_od_tbl <- tidyr::pivot_wider(od_tbl_long, names_from = mode, values_from = n, values_fill = 0) %>% + dplyr::rename(o_id = otaz, d_id = dtaz) %>% + dplyr::mutate(total = Drive + Transit + Bike + Walk) %>% + dplyr::filter(total >= 25) %>% + dplyr::select(-total) + +montlake_zone_tbl <- dplyr::right_join(all_zones_tbl, + tibble::tibble("TAZ" = unique(c(montlake_od_tbl$o_id, montlake_od_tbl$d_id))), + by = "TAZ") %>% + dplyr::select(TAZ) %>% + dplyr::rename(id = TAZ) +``` + +### Fetching OSM building data + +A/B street functions by generating buildings based on OSM entries, luckily the `osmextract` makes this an easy process in R. OSM buildings must be valid `sf` objects so that they can be parsed against the zone areas. To speed things up, the later part of this chunk selects 20% of buildings in each zone. 
+ +```{r, eval = FALSE} +osm_polygons <- osmextract::oe_read("http://download.geofabrik.de/north-america/us/washington-latest.osm.pbf", layer = "multipolygons") + +building_types <- c("yes", "house", "detached", "residential", "apartments", + "commercial", "retail", "school", "industrial", "semidetached_house", + "church", "hangar", "mobile_home", "warehouse", "office", + "college", "university", "public", "garages", "cabin", "hospital", + "dormitory", "hotel", "service", "parking", "manufactured", + "civic", "farm", "manufacturing", "floating_home", "government", + "bungalow", "transportation", "motel", "manufacture", "kindergarten", + "house_boat", "sports_centre") +osm_buildings <- osm_polygons %>% + dplyr::filter(building %in% building_types) %>% + dplyr::select(osm_way_id, name, building) + +osm_buildings_valid <- osm_buildings[sf::st_is_valid(osm_buildings),] + +montlake_osm_buildings_all <- osm_buildings_valid[montlake_zone_tbl,] + +# # use to visualize the building data +# tmap::tm_shape(boundary_sf_poly) + tmap::tm_borders() + +# tmap::tm_shape(montlake_osm_buildings) + tmap::tm_polygons(col = "building") + +# Filter down large objects for package ----------------------------------- +montlake_osm_buildings_all_joined <- montlake_osm_buildings_all %>% + sf::st_join(montlake_zone_tbl) + +set.seed(2021) +# select 20% of buildings in each zone to reduce file size for this example +# remove this filter or increase the sampling to include more buildings +montlake_osm_buildings_sample <- montlake_osm_buildings_all_joined %>% + dplyr::filter(!is.na(osm_way_id)) %>% + sf::st_drop_geometry() %>% + dplyr::group_by(id) %>% + dplyr::sample_frac(0.20) %>% + dplyr::ungroup() + +montlake_osm_buildings_tbl <- montlake_osm_buildings_all %>% + dplyr::filter(osm_way_id %in% montlake_osm_buildings_sample$osm_way_id) + +``` + + +### Generate A/B Street scenarios using `abstr` + +So now we are ready to generate simulation files. To do this, lets combine each of the elements outlined above, the zone (`montlake_zone_tbl`), building (`montlake_osm_buildings_tbl`) and OD (`montlake_od_tbl`) data. We do this using the ab_scenario() function in the abstr package, which generates a data frame representing travel between the montlake_buildings. 
While the OD data contains information on origin and destination zone, ab_scenario() ‘disaggregates’ the data and randomly selects building within each origin and destination zone to simulate travel at the individual level, as illustrated in the chunk below which uses only a sample of the montlake_od data, showing travel between three pairs of zones, to illustrate the process: + +```{r, eval = FALSE} +# use subset of OD data for speed +set.seed(42) +montlake_od_minimal = montlake_od_tbl[sample(nrow(montlake_od_tbl), size = 3), ] + +output_sf <- ab_scenario( + od = montlake_od_minimal, + zones = montlake_zone_tbl, + zones_d = NULL, + origin_buildings = montlake_osm_buildings_tbl, + destination_buildings = montlake_osm_buildings_tbl, + # destinations2 = NULL, + pop_var = 3, + time_fun = ab_time_normal, + output = "sf", + modes = c("Walk", "Bike", "Drive", "Transit")) + +# # visualize the results +# tmap::tm_shape(res) + tmap::tm_lines(col="mode") + +# tmap::tm_shape(montlake_zone_tbl) + tmap::tm_borders() + +# build json output +ab_save(ab_json(output_sf, time_fun = ab_time_normal, + scenario_name = "Montlake Example"), + f = "montlake_scenarios.json") +``` + + +Let's see what is in the file: + +```r +file.edit("montlake_scenarios.json") +``` + +The first trip schedule should look something like this, matching [A/B Street's schema](https://a-b-street.github.io/docs/tech/dev/formats/). + +```json +{ + "scenario_name": "Montlake Example", + "people": [ + { + "trips": [ + { + "departure": 317760000, + "origin": { + "Position": { + "longitude": -122.3139, + "latitude": 47.667 + } + }, + "destination": { + "Position": { + "longitude": -122.3187, + "latitude": 47.6484 + } + }, + "mode": "Walk", + "purpose": "Shopping" + } + ] + } +``` + +### Importing scenario files into A/B Street + +In order to import scenario files into A/B Street, you will need to: + +* Install a stable version of [Rust](https://www.rust-lang.org/tools/install) + + On Windows, you will also need [Visual Code Studio](https://code.visualstudio.com/) and [Visual Studio c++ build tools](https://visualstudio.microsoft.com/downloads/) prior to installing Rust. +* On Linux, run `sudo apt-get install libasound2-dev libxcb-shape0-dev libxcb-xfixes0-dev libpango1.0-dev libgtk-3-dev` or the equivalent for your distribution. +* Download the A/B Street repo `git clone https://github.com/a-b-street/abstreet.git` +* Fetch the minimal amount of data needed to get started `cargo run --bin updater -- --minimal` + +Once you have all of this up and running, you will be able to run the scenario import. To start, open up a terminal in Visual Studio or your chosen IDE. Next edit the following command to include the local path of your scenario.json file. +``` +cargo run --bin import_traffic -- --map=data/system/us/seattle/maps/montlake.bin --input=/path/to/input.json +``` + +Given you have correctly set the file path, the scenario should now be imported into your local version of the Montlake map. Next you can run the following command to start the A/B Street simulation in Montlake. + +``` +cargo run --bin game -- --dev data/system/us/seattle/maps/montlake.bin +``` + +Once the game has booted up click on the `scenarios` tab in the top right, it will currently be set as "none". Change this to the first option "Montlake Example" which will be the scenario we have just uploaded. Alternatively, you can skip the first import command and use the GUI to select a scenario file to import. 
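Before importing, it can be worth checking from R that the saved file really matches the schema shown above. The sketch below assumes the `jsonlite` package (one of the packages `abstr` builds on) is installed and that `montlake_scenarios.json` was written by the `ab_save()` call earlier in this vignette:

```r
library(jsonlite)

# Read the scenario file back in without simplification so the structure
# mirrors the JSON schema: scenario_name plus a list of people with trips
montlake_scenario <- jsonlite::fromJSON("montlake_scenarios.json", simplifyVector = FALSE)

montlake_scenario$scenario_name          # should be "Montlake Example"
length(montlake_scenario$people)         # number of simulated people
montlake_scenario$people[[1]]$trips[[1]] # departure, origin, destination, mode, purpose
```

If the top-level fields look right, the file should import cleanly with the `cargo run --bin import_traffic` command shown above.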
diff --git a/vignettes/pct_to_abstr.Rmd b/vignettes/pct_to_abstr.Rmd new file mode 100644 index 0000000..cd0a7e8 --- /dev/null +++ b/vignettes/pct_to_abstr.Rmd @@ -0,0 +1,373 @@ +--- +title: "Visualising cycling potential with A/B Street" +output: rmarkdown::html_vignette +vignette: > + %\VignetteIndexEntry{Visualising cycling potential with A/B Street} + %\VignetteEngine{knitr::rmarkdown} + %\VignetteEncoding{UTF-8} +author: +- "Nathanael Sheehan and Robin Lovelace" +--- + +```{r, include = FALSE} +knitr::opts_chunk$set( + collapse = TRUE, + comment = "#>" +) +``` + +# Introduction + +The `abstr` package was originally developed as part of the ActDev project, a tool that provides evidence on active travel provision and potential in and around planned and proposed development sites. + + +The [ActDev](https://actdev.cyipt.bike/) website demonstrates the potential for new developments to support walking and cycling by visualising data generated using reproducible R [code](https://github.com/cyipt/actdev/tree/main/code). +A key challenge was to create a simulation for each of the case study sites based on the input origin-destination datasets. +To overcome this challenge, A/B Street developers were commissioned to extend the ActDev tool to enable real time simulation of scenarios of change. +At the time there was no way to get the OD data we had in the R world into the A/B Street world. +This was motivation for creating the `abstr` R package. +As outlined in the package's README, the package's main job is to take origin-destination data from `.csv` files (and other tabular file types) and outputs `.json` files that can be imported and visualised in A/B Street. + +Given you have all of the above, you are ready to start transforming data-frames into simulations! +So lets set an aim for the vignette + +```{r eval=FALSE} +#### +#### +#### AIM: Use PCT data to create scenario of change where commuting cycling levels increase and car journeys decrease which can be imported +#### into A/B Street city simulation software. This method should be fully reproducible for all other pct_regions. +#### +#### +``` + +While the PCT is a powerful and popular tool for strategic cycleway planning it has some key limitations that are addressed by A/B Street: + +- The PCT only provides data on scenarios of behaviour change, providing little for people who want to visualise and explore scenarios of infrastructure change +- The PCT does not allow the user to edit the road network to help design the pro-cycling and traffic-reducing interventions that are needed to enable more people to cycle safely +- It does not provide evidence at high zoom levels, being designed for strategic planning + + +This vignette aims to demonstrate how data from the PCT, which provides evidence-based visions of how cycling could become the natural choice for urban travel, can be visualised in A/B Street to overcome the limitations outlined above. +The first stage is to install the necessary packages (see the [`abstr`](https://a-b-street.github.io/abstr/articles/abstr.html) vignette for a detailed introduction to software requirements). + +# Installing and Loading Packages + +To begin with, you need to install and load the necessary packages for this vignette. 
+ +```{r eval=FALSE} +#### INSTALL PACKAGES #### +cran_pkgs = c("abstr","pct","osmextract","sf","tidyverse") +remotes::install_cran(cran_pkgs) +#### LOAD PACKAGES #### +library(abstr) +library(pct) +library(osmextract) +library(sf) +library(tidyverse) +``` + +# Choosing a region + +Data in the PCT for England and Wales is divided into regions, which can be seen below. + +```{r} +pct::pct_regions$region_name +``` + +To run the example below, replace `devon` with a different region from the list above (warning, this may not work for large regions). + +```{r} +region_name = "devon" +``` + +You can select a specific local authority of interest from those available in the region of interest. +You can check which are available in your region of interest as follows: + +```{r} +table(pct_regions_lookup$lad16nm[pct_regions_lookup$region_name == region_name]) +``` + +For the purposes of this article we will use Exeter as the case study: + +```{r} +lad_name = "Exeter" +``` + +# Fetching PCT Data + +Next you want to fetch two types of PCT data. +Firstly, zone data, which is gathered using the `get_pct_zones()` function and is filtered to only include a local authority of choice, in this example we use Exeter. +Secondly, commute data, which is gathered using the `get_pct_lines()` function and is also filtered to only include trips within the local authority. + +```{r eval=FALSE} +#### READ DATA #### +devon_zones = get_pct_zones(region = region_name, geography = "msoa") # get zone data +# filter for exeter +exeter_zones = devon_zones %>% filter(lad_name == lad_name) %>% + select(geo_code) +# get commute od data +exeter_commute_od = get_pct_lines(region = region_name, geography = "msoa") %>% + filter(lad_name1 == lad_name & lad_name2 == lad_name) # filter for exeter +``` + +# Data Cleaning and Transformation + +Now you have your data, its time to clean and transform it. +In fact, you only need to transform the `exeter_commute_od` dataframe as the `exeter_zones` is already in `abstr` format. +The first step in cleaning the data requires renaming variables so that we can clearly see the difference between the base scenario and the scenario of change. +Next, you calculate the scenario of change, in this example we use the `uptake_pct_godutch_2020()` function which takes two arguments of `distance` and `gradient` in its model calculation. +The results from this PCT function allow you to calculate the mode shift from driving to cycling. +Finally, we subset the data to only include the columns which are needed to progress. 
+ +```{r eval=FALSE} +exeter_commute_od = exeter_commute_od %>% + mutate(cycle_base = bicycle) %>% + mutate(walk_base = foot) %>% + mutate(transit_base = bus + train_tube) %>% # bunch of renaming -_- + mutate(drive_base = car_driver + car_passenger + motorbike + taxi_other) %>% + mutate(all_base = all) %>% + mutate( + # create new columns + pcycle_godutch_uptake = uptake_pct_godutch_2020(distance = rf_dist_km, gradient = rf_avslope_perc), + cycle_godutch_additional = pcycle_godutch_uptake * drive_base, + cycle_godutch = cycle_base + cycle_godutch_additional, + pcycle_godutch = cycle_godutch / all_base, + drive__godutch = drive_base - cycle_godutch_additional, + across(c(drive__godutch, cycle_godutch), round, 0), + all_go_dutch = drive__godutch + cycle_godutch + transit_base + walk_base + ) %>% + select( + # select variables for new df + geo_code1, + geo_code2, + cycle_base, + drive_base, + walk_base, + transit_base, + all_base, + all_go_dutch, + drive__godutch, + cycle_godutch, + cycle_godutch_additional, + pcycle_godutch + ) +``` + +As a quick sanity check we can make sure our model has not generated any new commutes and we still have the same base number of commuters as before. + +```{r eval=FALSE} +# sanity check: ensure total remains the same +# (this is not a dynamic model where population change is factored in) +identical(exeter_commute_od$all_base, exeter_commute_od$all_go_dutch) +``` + +# Download OSM building data + +Now, you need to download OSM building data to populate the AB Street simulation map. +In this example we use the `osmextract` package to fetch a PBF (protocolbuffer binary format) file hosted on GeoFabrik. +You then need to filter the contents of the PBF file to only include the defined building types and subset the data to only include `osm_way_id, name, building` columns. +Following this, you should ensure you only include valid sf buildings and then aggregate the building data against the zone boundary. + +```{r eval=FALSE} +#### DOWNLOAD OSM BUILDING DATA #### +osm_polygons = osmextract::oe_read( + "https://download.geofabrik.de/europe/great-britain/england/devon-latest.osm.pbf", + # download osm buildings for region using geofabrik + layer = "multipolygons" +) + +building_types = c( + "yes", + "house", + "detached", + "residential", + "apartments", + "commercial", + "retail", + "school", + "industrial", + "semidetached_house", + "church", + "hangar", + "mobile_home", + "warehouse", + "office", + "college", + "university", + "public", + "garages", + "cabin", + "hospital", + "dormitory", + "hotel", + "service", + "parking", + "manufactured", + "civic", + "farm", + "manufacturing", + "floating_home", + "government", + "bungalow", + "transportation", + "motel", + "manufacture", + "kindergarten", + "house_boat", + "sports_centre" +) +osm_buildings = osm_polygons %>% + filter(building %in% building_types) %>% + select(osm_way_id, name, building) + +osm_buildings_valid = osm_buildings[sf::st_is_valid(osm_buildings), ] + +exeter_osm_buildings_all = osm_buildings_valid[exeter_zones, ] +``` + +Subsequently, you can join the OSM buildings data with the `exeter_zones` geography in order to create the complete building table. 
This table is filtered to remove rows with `NA` values in the `osm_way_id` column and is then used to subset the buildings table so that it only includes buildings falling within the Exeter zones.

```{r eval = FALSE}
#### JOIN OSM BUILDINGS WITH ZONE DATA ####
exeter_osm_buildings_all_joined = exeter_osm_buildings_all %>%
  sf::st_join(exeter_zones)

exeter_osm_buildings_sample = exeter_osm_buildings_all_joined %>%
  filter(!is.na(osm_way_id))

exeter_osm_buildings_tbl = exeter_osm_buildings_all %>%
  filter(osm_way_id %in% exeter_osm_buildings_sample$osm_way_id)
```

# Using `abstr` to Generate Scenarios

You now have everything in place to generate A/B Street scenarios for both our base commute rate and our Go Dutch commute rate.
However, `abstr` takes a strict column name definition so as to adhere to the A/B Street documentation.
This means you need to rename the mode columns for scenario generation.
In order to make things easy, we can create a simple logic gate that renames our mode columns depending on the boolean value `go_dutch`.

```{r eval = FALSE}
set.seed(2021) # for reproducible builds
#### LOGIC GATE ####
# Logic gate for go_dutch scenario of change, where cycling levels increase to a proportion reflecting the Netherlands.
# Switch to FALSE if you want census commuting OD
go_dutch = TRUE
if (go_dutch == TRUE) {
  exeter_od = exeter_commute_od %>%
    mutate(All = all_go_dutch) %>%
    mutate(Bike = cycle_godutch) %>%
    mutate(Transit = transit_base) %>%
    mutate(Drive = drive_base) %>%
    mutate(Walk = walk_base) %>%
    select(geo_code1, geo_code2, All, Bike, Transit, Drive, Walk, geometry)
} else {
  exeter_od = exeter_commute_od %>%
    mutate(All = all_base) %>%
    mutate(Bike = cycle_base) %>%
    mutate(Drive = drive_base) %>%
    mutate(Transit = transit_base) %>%
    mutate(Walk = walk_base) %>%
    select(geo_code1, geo_code2, All, Bike, Transit, Drive, Walk, geometry)
}
```

Voila, you are ready to generate simulation files from your data-frames.
Let's start by using the `ab_scenario()` function with our `exeter_od`, `exeter_zones` and `exeter_osm_buildings_tbl` data-frames.

```{r eval = FALSE}
#### GENERATE A/B STREET SCENARIO ####
output_sf = ab_scenario(
  od = exeter_od,
  zones = exeter_zones,
  zones_d = NULL,
  origin_buildings = exeter_osm_buildings_tbl,
  destination_buildings = exeter_osm_buildings_tbl,
  pop_var = 3,
  time_fun = ab_time_normal,
  output = "sf",
  modes = c("Walk", "Bike", "Drive", "Transit")
)
```

To conclude, you will need to generate `JSON` output using the `ab_json()` function and then save the JSON file to your local machine using the `ab_save()` function.
(You can download this [file](https://github.com/a-b-street/abstr/releases/download/6543bdc/dutch.json) from the repo's releases.)

```{r eval = FALSE}
#### SAVE JSON FILE ####
output_json = ab_json(output_sf, time_fun = ab_time_normal, scenario_name = "Go Dutch")
ab_save(output_json, f = "dutch.json")
```

```{r, eval=FALSE, echo=FALSE}
# Upload the json file for future reference
piggyback::pb_upload("dutch.json")
piggyback::pb_download_url("dutch.json")
```

# Importing scenario files into A/B Street

Once you have all of this up and running, you will be able to run the scenario import.
A/B Street contains 44 sites in England which appear in the ActDev project.
If the local authority you have generated a scenario for is not included, follow the A/B Street [documentation](https://a-b-street.github.io/docs/user/new_city.html) to import a new city.
Next, fire up a terminal in Visual Studio or your chosen Rust IDE, with the `abstreet` repo as the working directory, and run A/B Street as follows:

```bash
cargo run --bin game --release
```

From there you can select the area for which data should be downloaded
(see the A/B Street [docs on importing](https://a-b-street.github.io/docs/tech/map/importing/index.html) data for details).

Assuming you have the necessary map data, the command to import the `.json` file is as follows:

```bash
cargo run --bin import_traffic -- --map=PATH/TO/MAP --input=/PATH/TO/JSON.json
```

If you used the Exeter case study and your data is located in the root directory, you can import the traffic as follows:

```bash
cargo run --bin import_traffic -- \
  --map=data/system/gb/exeter_red_cow_village/maps/center.bin \
  --input=dutch.json
```

Once this is done, you can fire up the A/B Street simulation software using the command:

```bash
cargo run --bin game -- --dev PATH/TO/MAP
```

which in the case of the Exeter example shown above is:

```bash
cargo run --bin game -- --dev data/system/gb/exeter_red_cow_village/maps/center.bin
```

Once the game has booted up, click on the `scenarios` tab in the top right; it will currently be set to "none".

![](https://user-images.githubusercontent.com/1825120/131586308-212caee2-3ebe-48f4-8e15-39efacedf3b4.png)

Change this to "Go Dutch", or whatever you called the scenario you have just uploaded, as shown below.

![](https://user-images.githubusercontent.com/1825120/131586413-c91cb53e-9b19-4d98-8e7b-d46a2f6a5845.png)

After selecting your scenario, you should see something like this: a Go Dutch scenario of cycling taking place before your eyes, making the OD data frames come to life in a real-time simulation!

![](https://user-images.githubusercontent.com/1825120/131586989-6ac002e0-55b0-4124-817e-4fa999421100.gif)

Then let your eyes wander over the simulation you have created and let your imagination explore the possibilities of transforming your local area into an active travel utopia.

# Conclusions and next steps

This vignette is by no means simple, and if you get stuck please raise an issue on [GitHub](https://github.com/a-b-street/abstr/issues).
If you succeed in generating a simulation for your chosen city, please share it on social media, tagging the `abstr` authors, so we can see how you have used the methods and data.
If you are looking to extend the work presented in this vignette, why not try:

- building a simulation with a different uptake model
- integrating school commute data from the PCT package
- building your own uptake model, as sketched below
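If you want to try the last of these ideas, the sketch below shows one way a hand-rolled uptake function could slot into the workflow used in this vignette. The function `uptake_simple()` and its coefficients are invented purely for illustration (they are not calibrated against any evidence), and the chunk assumes the `exeter_commute_od` object created earlier, with its `rf_dist_km`, `rf_avslope_perc`, `drive_base` and `cycle_base` columns, is still in your environment:

```{r eval=FALSE}
# Hypothetical uptake model: the probability of a driven commute switching to
# cycling decays with route distance (km) and average gradient (%).
uptake_simple = function(distance, gradient) {
  stats::plogis(1 - 0.2 * distance - 0.3 * gradient)
}

exeter_custom_od = exeter_commute_od %>%
  mutate(
    pcycle_custom_uptake = uptake_simple(rf_dist_km, rf_avslope_perc),
    cycle_custom_additional = pcycle_custom_uptake * drive_base,
    cycle_custom = cycle_base + cycle_custom_additional,
    drive_custom = drive_base - cycle_custom_additional
  )
```

From there, the column renaming, `ab_scenario()`, `ab_json()` and `ab_save()` steps are the same as in the Go Dutch example above.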