From ff773be36bf86686650346ddd8dd2ef8241009d6 Mon Sep 17 00:00:00 2001
From: Steve Pemberton <57773252+StevePem@users.noreply.github.com>
Date: Wed, 2 Aug 2023 04:36:19 +0000
Subject: [PATCH 001/103] Early morning timetable (#22)

Co-authored-by: Steve Pemberton
---
 functions/gtfs2PtNetwork.R | 28 +++++++++++++++++++++-------
 1 file changed, 21 insertions(+), 7 deletions(-)

diff --git a/functions/gtfs2PtNetwork.R b/functions/gtfs2PtNetwork.R
index 2260ab6..5892739 100644
--- a/functions/gtfs2PtNetwork.R
+++ b/functions/gtfs2PtNetwork.R
@@ -219,13 +219,25 @@ exportGtfsSchedule <- function(links,
                                stopTable,
                                outputCrs){
   
-  
-  vehicleTripMatching <- trips %>%
-    left_join(routes,by="route_id")
+  # duplicate stopTimes where arrival time is at or after 24:00:00, so timetable contains early morning entries
+  earlyMorningStopTimes <- stopTimes %>%
+    filter(departure_time >= 86400) %>% # 86400 is the number of seconds in 24 hours
+    mutate(arrival_time = arrival_time - 86400,
+           departure_time = departure_time - 86400,
+           trip_id_orig = trip_id, # original trip_id
+           trip_id = paste0(trip_id, "_E")) # add 'E' (early) to trip_id, so copy is distinguished in creating ptNetwork
+  
+  # add orig trip id field to stopTimes (so 'trip_id' will distinguish duplicated 'early' trip id's,
+  # while 'trip_id_orig' will be used to join other tables containing only the original trip id's)
+  stopTimes <- stopTimes %>%
+    mutate(trip_id_orig = trip_id)
+  
+  # combine with early morning stop times
+  stopTimes <- bind_rows(stopTimes, earlyMorningStopTimes)
 
   # the public transport network
   ptNetwork <- stopTimes %>%
-    dplyr::select(trip_id,arrival_time,departure_time,from_id=stop_id,from_x=x,from_y=y) %>%
+    dplyr::select(trip_id,arrival_time,departure_time,from_id=stop_id,from_x=x,from_y=y, trip_id_orig) %>%
     # filter(row_number()<200) %>%
     group_by(trip_id) %>%
     mutate(arrivalOffset=arrival_time-min(arrival_time)) %>%
@@ -244,10 +256,13 @@ exportGtfsSchedule <- function(links,
            arrival_time=as.character(as_hms(arrival_time)),
            departure_time=as.character(as_hms(departure_time))) %>%
     # join trips and routes, so that service_type (from routes) can be used in stop_id
-    left_join(., trips, by = "trip_id") %>%
+    left_join(., trips, by = c("trip_id_orig" = "trip_id")) %>%
     left_join(., routes, by = "route_id") %>%
     as.data.frame()
 
+  vehicleTripMatching <- ptNetwork %>%
+    distinct(route_id, service_id, trip_id, service_type)
+
   arrivalTimes <- ptNetwork %>%
     dplyr::select(arrival_time,trip_id) %>%
     group_by(trip_id) %>%
@@ -307,8 +322,7 @@ exportGtfsSchedule <- function(links,
   # ./data/transitVehicles.xml: vehicle
   # id is just the trip_id. This means we can potentially have a different vehicle
   # for each trip. Have also set the vehicle type here.
- vehicles <- trips %>% - inner_join(routes,by="route_id") %>% + vehicles <- vehicleTripMatching %>% dplyr::select(id=trip_id,service_type) %>% arrange(id,service_type) %>% as.data.frame() From 232d64d26a7d9941d1a8e6293daf335bc3a26688 Mon Sep 17 00:00:00 2001 From: StevePem Date: Fri, 4 Aug 2023 09:20:22 +1000 Subject: [PATCH 002/103] Trunk roads walkable/cyclable by default --- functions/buildDefaultsDF.R | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/functions/buildDefaultsDF.R b/functions/buildDefaultsDF.R index 65b8216..63c32d5 100644 --- a/functions/buildDefaultsDF.R +++ b/functions/buildDefaultsDF.R @@ -4,8 +4,8 @@ buildDefaultsDF <- function(){ ~highway , ~permlanes, ~freespeed, ~is_oneway, ~laneCapacity, ~is_cycle, ~is_walk, ~is_car, ~highway_order, "motorway" , 4 , (110/3.6), 0 , 2000 , 0 , 0 , 1 , 1 , "motorway_link" , 2 , (80/3.6) , 0 , 1500 , 0 , 0 , 1 , 8 , - "trunk" , 3 , (100/3.6), 0 , 2000 , 0 , 0 , 1 , 2 , - "trunk_link" , 2 , (80/3.6) , 0 , 1500 , 0 , 0 , 1 , 9 , + "trunk" , 3 , (100/3.6), 0 , 2000 , 1 , 1 , 1 , 2 , + "trunk_link" , 2 , (80/3.6) , 0 , 1500 , 1 , 1 , 1 , 9 , "primary" , 2 , (80/3.6) , 0 , 1500 , 1 , 1 , 1 , 3 , "primary_link" , 1 , (60/3.6) , 0 , 1500 , 1 , 1 , 1 , 10 , From 1743e138661e53e964421aeb52530510d1aeb540 Mon Sep 17 00:00:00 2001 From: StevePem Date: Fri, 4 Aug 2023 09:52:46 +1000 Subject: [PATCH 003/103] Remove specific Melbourne corrections --- NetworkGenerator.R | 17 +++-------------- 1 file changed, 3 insertions(+), 14 deletions(-) diff --git a/NetworkGenerator.R b/NetworkGenerator.R index 1282384..2d165a9 100644 --- a/NetworkGenerator.R +++ b/NetworkGenerator.R @@ -24,11 +24,7 @@ makeNetwork<-function(outputFileName="test"){ desnificationMaxLengh=500 densifyBikeways=F - # CORRECTION - # To add/remove specified links - see osmCorrection.R - # Change to TRUE if running on Greater Melbourne OSM, Otherwise, keep FALSE - # Also you can use the same function to correct networks for your region if needed - correctNetwork=F + # CAPACITY ADJUSTMENT # A flag for whether to multiply capacity of links shorter than 100m by 2 or not # In some cases such as when building network for simulation of small samples (e.g. 
<1%) it might be desired adjustCapacity=F @@ -145,15 +141,8 @@ makeNetwork<-function(outputFileName="test"){ echo("Processing OSM tags and joining with defaults\n") system.time( osmAttributes <- processOsmTags(osm_metadata,defaults_df)) - # There are some roads in OSM that are not correctly attributed - # Use the function below to manually add their attributes based osm id - osmAttributesCorrected <- osmMetaCorrection(osmAttributes) - edgesOsm <- networkInput[[2]] - # Some network link corrections (+/-) specifically for Greater Melbourne OSM - if(correctNetwork) edgesOsm <- osmNetworkCorrection(networkInput) - - edgesAttributed <- edgesOsm %>% - inner_join(osmAttributesCorrected, by="osm_id") %>% + edgesAttributed <- networkInput[[2]] %>% + inner_join(osmAttributes, by="osm_id") %>% # dplyr::select(-osm_id,highway,highway_order) dplyr::select(-highway,highway_order) From fba3434ec7f47a6440169b7ca4fcccaa05f8b2a0 Mon Sep 17 00:00:00 2001 From: StevePem Date: Fri, 4 Aug 2023 16:13:41 +1000 Subject: [PATCH 004/103] CRS as parameter not hard-coded --- NetworkGenerator.R | 25 ++++++++++++------- functions/combineRedundantEdges.R | 4 +-- functions/combineUndirectedAndDirectedEdges.R | 4 +-- functions/gtfs2PtNetwork.R | 2 +- functions/makeEdgesDirect.R | 4 +-- functions/removeRedundantUndirectedEdges.R | 4 +-- functions/simplifyIntersections.R | 6 ++--- functions/simplifyNetwork.R | 6 ++--- functions/writeOutputs.R | 12 ++++----- 9 files changed, 37 insertions(+), 30 deletions(-) diff --git a/NetworkGenerator.R b/NetworkGenerator.R index 2d165a9..d3ed555 100644 --- a/NetworkGenerator.R +++ b/NetworkGenerator.R @@ -162,7 +162,8 @@ makeNetwork<-function(outputFileName="test"){ # simplify intersections while preserving attributes and original geometry. system.time(intersectionsSimplified <- simplifyIntersections(largestComponent[[1]], largestComponent[[2]], - shortLinkLength)) + shortLinkLength, + outputCrs)) # Merge edges going between the same two nodes, picking the shortest geometry. # * One-way edges going in the same direction will be merged @@ -171,14 +172,16 @@ makeNetwork<-function(outputFileName="test"){ # * One-way edges will NOT be merged with two-way edges. # * Non-car edges do NOT count towards the merged lane count (permlanes) system.time(edgesCombined <- combineRedundantEdges(intersectionsSimplified[[1]], - intersectionsSimplified[[2]])) + intersectionsSimplified[[2]], + outputCrs)) # Merge one-way and two-way edges going between the same two nodes. In these # cases, the merged attributes will be two-way. # This guarantees that there will only be a single edge between any two nodes. system.time(combinedUndirectedAndDirected <- combineUndirectedAndDirectedEdges(edgesCombined[[1]], - edgesCombined[[2]])) + edgesCombined[[2]], + outputCrs)) # If there is a chain of edges between intersections, merge them together system.time(edgesSimplified <- simplifyLines(combinedUndirectedAndDirected[[1]], @@ -190,15 +193,18 @@ makeNetwork<-function(outputFileName="test"){ # Do a second round of simplification. 
system.time(edgesCombined2 <- combineRedundantEdges(noDangles[[1]], - noDangles[[2]])) + noDangles[[2]], + outputCrs)) system.time(combinedUndirectedAndDirected2 <- combineUndirectedAndDirectedEdges(edgesCombined2[[1]], - edgesCombined2[[2]])) + edgesCombined2[[2]], + outputCrs)) system.time(edgesSimplified2 <- simplifyLines(combinedUndirectedAndDirected2[[1]], combinedUndirectedAndDirected2[[2]])) system.time(edgesCombined3 <- combineRedundantEdges(edgesSimplified2[[1]], - edgesSimplified2[[2]])) + edgesSimplified2[[2]], + outputCrs)) networkMode <- addMode(edgesCombined3) @@ -217,7 +223,8 @@ makeNetwork<-function(outputFileName="test"){ # simplify geometry so all edges are straight lines system.time(networkDirect <- makeEdgesDirect(networkDensified[[1]], - networkDensified[[2]])) + networkDensified[[2]], + outputCrs)) # add mode to edges, add type to nodes, change cycleway from numbers to text networkRestructured <- restructureData(networkDirect, highway_lookup, @@ -270,8 +277,8 @@ makeNetwork<-function(outputFileName="test"){ echo("| **Launching Output Writing** |\n") echo("--------------------------------------------------------\n") - if(writeSqlite) system.time(exportSQlite(networkFinal, outputDir)) - if(writeShp) system.time(exportShp(networkFinal, outputDir)) + if(writeSqlite) system.time(exportSQlite(networkFinal, outputDir, outputCrs)) + if(writeShp) system.time(exportShp(networkFinal, outputDir, outputCrs)) if(writeXml) system.time(exportXML(networkFinal, outputDir)) } diff --git a/functions/combineRedundantEdges.R b/functions/combineRedundantEdges.R index 3294708..f6abb36 100644 --- a/functions/combineRedundantEdges.R +++ b/functions/combineRedundantEdges.R @@ -1,7 +1,7 @@ # nodes_current<-intersectionsSimplified[[1]] # edges_current<-intersectionsSimplified[[2]] -combineRedundantEdges <- function(nodes_current,edges_current){ +combineRedundantEdges <- function(nodes_current,edges_current,outputCrs){ # assuming a dataframe with a 'current_group' column, merge edges together groupingFunction <- function(grouped_edges) { @@ -130,7 +130,7 @@ combineRedundantEdges <- function(nodes_current,edges_current){ inner_join(edges_all, by="uid") %>% dplyr::select(-uid) %>% st_sf() %>% - st_set_crs(28355) + st_set_crs(outputCrs) return(list(nodes_current,edges_all_geom)) } diff --git a/functions/combineUndirectedAndDirectedEdges.R b/functions/combineUndirectedAndDirectedEdges.R index 566dfeb..17748e9 100644 --- a/functions/combineUndirectedAndDirectedEdges.R +++ b/functions/combineUndirectedAndDirectedEdges.R @@ -1,7 +1,7 @@ # nodes_current<-edgesCombined[[1]] # edges_current<-edgesCombined[[2]] -combineUndirectedAndDirectedEdges <- function(nodes_current,edges_current){ +combineUndirectedAndDirectedEdges <- function(nodes_current,edges_current,outputCrs){ edges_current <- edges_current %>% mutate(uid=row_number()) %>% @@ -80,7 +80,7 @@ combineUndirectedAndDirectedEdges <- function(nodes_current,edges_current){ inner_join(edges_grouped2, by="uid") %>% dplyr::select(-uid) %>% st_sf() %>% - st_set_crs(28355) + st_set_crs(outputCrs) return(list(nodes_current,edges_all_geom)) } diff --git a/functions/gtfs2PtNetwork.R b/functions/gtfs2PtNetwork.R index 5892739..209556f 100644 --- a/functions/gtfs2PtNetwork.R +++ b/functions/gtfs2PtNetwork.R @@ -5,7 +5,7 @@ addGtfsLinks <- function(outputLocation="./test/", analysis_start = as.Date("2019-10-11","%Y-%m-%d"), analysis_end = as.Date("2019-10-17","%Y-%m-%d"), studyRegion=NA, - outputCrs=28355){ + outputCrs=outputCrs){ # outputLocation="./gtfs/" # 
nodes=networkRestructured[[1]] # links=networkRestructured[[2]] diff --git a/functions/makeEdgesDirect.R b/functions/makeEdgesDirect.R index 5afbaf6..12bea06 100644 --- a/functions/makeEdgesDirect.R +++ b/functions/makeEdgesDirect.R @@ -1,6 +1,6 @@ # nodes_current <-combinedUndirectedAndDirected2[[1]] # edges_current <-combinedUndirectedAndDirected2[[2]] -makeEdgesDirect <- function(nodes_current,edges_current){ +makeEdgesDirect <- function(nodes_current,edges_current,outputCrs){ # nodes_coords <- nodes_current %>% # st_drop_geometry() %>% @@ -15,7 +15,7 @@ makeEdgesDirect <- function(nodes_current,edges_current){ left_join(st_drop_geometry(nodes_current),by=c("to_id"="id")) %>% rename(toX=X,toY=Y) %>% mutate(geom=paste0("LINESTRING(",fromX," ",fromY,",",toX," ",toY,")")) %>% - st_as_sf(wkt = "geom", crs = 28355) + st_as_sf(wkt = "geom", crs = outputCrs) return(list(nodes_current,edges_current)) } diff --git a/functions/removeRedundantUndirectedEdges.R b/functions/removeRedundantUndirectedEdges.R index 7959110..0ae1ea4 100644 --- a/functions/removeRedundantUndirectedEdges.R +++ b/functions/removeRedundantUndirectedEdges.R @@ -1,7 +1,7 @@ # nodes_current<-networkSimplified[[1]] # edges_current<-networkSimplified[[2]] -removeRedundantUndirectedEdges <- function(nodes_current,edges_current,road_types){ +removeRedundantUndirectedEdges <- function(nodes_current,edges_current,road_types,outputCrs){ isOneWay <- road_types %>% dplyr::select(road_type,oneway) @@ -29,7 +29,7 @@ removeRedundantUndirectedEdges <- function(nodes_current,edges_current,road_type ungroup() %>% data.frame() %>% st_sf() %>% - st_set_crs(28355) + st_set_crs(outputCrs) return(list(nodes_current,edgesNoRedundancies)) } diff --git a/functions/simplifyIntersections.R b/functions/simplifyIntersections.R index af5fc19..9f45dc3 100644 --- a/functions/simplifyIntersections.R +++ b/functions/simplifyIntersections.R @@ -1,4 +1,4 @@ -simplifyIntersections <- function(n_df, l_df, shortLinkLength=10){ +simplifyIntersections <- function(n_df, l_df, shortLinkLength=10, outputCrs){ # shortLinkLength = 20 # l_df=largestComponent[[2]] # n_df=largestComponent[[1]] @@ -48,7 +48,7 @@ simplifyIntersections <- function(n_df, l_df, shortLinkLength=10){ filter(!id %in% comp_df$node_id) %>% rbind(dplyr::select(comp_df_centroid,id=cluster_id,is_roundabout,is_signal,X,Y)) %>% mutate(geom=paste0("POINT(",X," ",Y,")")) %>% - st_as_sf(wkt = "geom", crs = 28355) + st_as_sf(wkt = "geom", crs = outputCrs) # this function adds endpoints to the geometries addEndpoints <- function(fromX,fromY,toX,toY,geom) { @@ -84,7 +84,7 @@ simplifyIntersections <- function(n_df, l_df, shortLinkLength=10){ st_drop_geometry() %>% mutate(geom=geomExtended) %>% st_sf() %>% - st_set_crs(28355) %>% + st_set_crs(outputCrs) %>% # remove any loops filter(from_id != to_id) %>% dplyr::select(-fromX,-fromY,-toX,-toY) %>% diff --git a/functions/simplifyNetwork.R b/functions/simplifyNetwork.R index e05b98e..d6c5106 100644 --- a/functions/simplifyNetwork.R +++ b/functions/simplifyNetwork.R @@ -1,4 +1,4 @@ -simplifyNetwork <- function(n_df,l_df,osm_metadata, shortLinkLength = 20){ +simplifyNetwork <- function(n_df,l_df,osm_metadata, shortLinkLength = 20, outputCrs){ # l_df=lines_np # n_df=nodes_np # shortLinkLength = 20 @@ -55,7 +55,7 @@ simplifyNetwork <- function(n_df,l_df,osm_metadata, shortLinkLength = 20){ filter(!id %in% comp_df$node_id) %>% rbind(dplyr::select(comp_df_centroid,id=cluster_id,is_roundabout,is_signal,X,Y)) %>% mutate(geom=paste0("POINT(",X," ",Y,")")) %>% - st_as_sf(wkt 
= "geom", crs = 28355) + st_as_sf(wkt = "geom", crs = outputCrs) # keeping only the long edges, we alter any endpoints that are part of a # cluster, replacing them with the cluster id and the centroid coordinates @@ -72,7 +72,7 @@ simplifyNetwork <- function(n_df,l_df,osm_metadata, shortLinkLength = 20){ mutate(toY=ifelse(is.na(cluster_id),toY,Y)) %>% dplyr::select(road_type,length,from_id,to_id,fromX,fromY,toX,toY) %>% mutate(geom=paste0("LINESTRING(",fromX," ",fromY,",",toX," ",toY,")")) %>% - st_as_sf(wkt = "geom", crs = 28355) + st_as_sf(wkt = "geom", crs = outputCrs) # remove the disconnected bits diff --git a/functions/writeOutputs.R b/functions/writeOutputs.R index 5568ad1..c2eba00 100644 --- a/functions/writeOutputs.R +++ b/functions/writeOutputs.R @@ -1,6 +1,6 @@ # SQlite ------------------------------------------------------------------ -exportSQlite <- function(networkFinal, outputDir){ +exportSQlite <- function(networkFinal, outputDir, outputCrs){ cat('\n') echo(paste0('Writing the sqlite output: ', nrow(networkFinal[[2]]), @@ -11,14 +11,14 @@ exportSQlite <- function(networkFinal, outputDir){ if(class(networkFinal[[1]])[1]!="sf"){ networkFinal[[1]] <- networkFinal[[1]] %>% mutate(GEOMETRY=paste0("POINT(",x," ",y,")")) %>% - st_as_sf(wkt = "GEOMETRY", crs = 28355) %>% + st_as_sf(wkt = "GEOMETRY", crs = outputCrs) %>% as.data.frame() %>% st_sf() } if(class(networkFinal[[2]])[1]!="sf"){ networkFinal[[2]] <- networkFinal[[2]] %>% mutate(GEOMETRY=paste0("LINESTRING(",fromX," ",fromY,",",toX," ",toY,")")) %>% - st_as_sf(wkt = "GEOMETRY", crs = 28355) %>% + st_as_sf(wkt = "GEOMETRY", crs = outputCrs) %>% as.data.frame() %>% st_sf() } @@ -35,7 +35,7 @@ exportSQlite <- function(networkFinal, outputDir){ } # ShapeFile --------------------------------------------------------------- -exportShp <- function(networkFinal, outputDir){ +exportShp <- function(networkFinal, outputDir, outputCrs){ cat('\n') echo(paste0('Writing the ShapeFile output: ', nrow(networkFinal[[2]]), @@ -44,14 +44,14 @@ exportShp <- function(networkFinal, outputDir){ if(class(networkFinal[[1]])!="sf"){ networkFinal[[1]] <- networkFinal[[1]] %>% mutate(GEOMETRY=paste0("POINT(",x," ",y,")")) %>% - st_as_sf(wkt = "GEOMETRY", crs = 28355) %>% + st_as_sf(wkt = "GEOMETRY", crs = outputCrs) %>% as.data.frame() %>% st_sf() } if(class(networkFinal[[2]])!="sf"){ networkFinal[[2]] <- networkFinal[[2]] %>% mutate(GEOMETRY=paste0("LINESTRING(",fromX," ",fromY,",",toX," ",toY,")")) %>% - st_as_sf(wkt = "GEOMETRY", crs = 28355) %>% + st_as_sf(wkt = "GEOMETRY", crs = outputCrs) %>% as.data.frame() %>% st_sf() } From 0d83dbf298ab68b14ef044ba89e1fda50a662f7c Mon Sep 17 00:00:00 2001 From: StevePem Date: Fri, 4 Aug 2023 16:14:14 +1000 Subject: [PATCH 005/103] Fix missing CRS --- functions/densifyNetwork.R | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/functions/densifyNetwork.R b/functions/densifyNetwork.R index dc6bc2a..1fb3f93 100644 --- a/functions/densifyNetwork.R +++ b/functions/densifyNetwork.R @@ -86,14 +86,14 @@ densifyNetwork <- function(networkList, minimum_length=400, densifyBikeways=F){ links_combined <- bind_rows( links_unsegmented, - links_segmented + links_segmented %>% st_set_crs(st_crs(links_unsegmented)) ) %>% dplyr::select(-tmp_id) %>% st_sf() nodes_combined <- bind_rows( nodes_df, - nodes_segmented + nodes_segmented %>% st_set_crs(st_crs(nodes_df)) ) %>% st_sf() From 0c3e3faeb3cbf1eb2c1b0de992d0771a9416d537 Mon Sep 17 00:00:00 2001 From: StevePem Date: Fri, 4 Aug 2023 16:15:06 +1000 Subject: 
[PATCH 006/103] Fix error when can't find geometry for NA --- functions/gtfs2PtNetwork.R | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/functions/gtfs2PtNetwork.R b/functions/gtfs2PtNetwork.R index 209556f..65ea206 100644 --- a/functions/gtfs2PtNetwork.R +++ b/functions/gtfs2PtNetwork.R @@ -123,7 +123,7 @@ processGtfs <- function(outputLocation="./test/", st_snap_to_grid(1) # only want stops within the study region - if(!is.na(st_geometry(studyRegion))){ + if(!is.na(studyRegion)){ message("Cropping to study region") validStops <- validStops %>% filter(lengths(st_intersects(., studyRegion)) > 0) From fdc1fce9adf643d4c2a2a4cf108fa4d42f1d0dd0 Mon Sep 17 00:00:00 2001 From: StevePem Date: Fri, 4 Aug 2023 23:01:37 +1000 Subject: [PATCH 007/103] handle densification for lines digitised in either direction --- functions/densifyNetwork.R | 53 +++++++++++++++++++++++++++++++++----- 1 file changed, 47 insertions(+), 6 deletions(-) diff --git a/functions/densifyNetwork.R b/functions/densifyNetwork.R index 1fb3f93..8994606 100644 --- a/functions/densifyNetwork.R +++ b/functions/densifyNetwork.R @@ -61,6 +61,22 @@ densifyNetwork <- function(networkList, minimum_length=400, densifyBikeways=F){ if(i==length(links_list)) cat(paste0(i,"/",length(links_list)," rows segmented\n")) } + # add flag for direction in which geometry of links_to_segmentize is recorded + links_to_segmentize <- links_to_segmentize %>% + # join X & Y coordinates for from_id + left_join(nodes_df %>% + st_drop_geometry() %>% + dplyr::select(id, fromx = X, fromy = Y), + by = c("from_id" = "id")) %>% + # check whether startpoint of geometry matches from_id ("forward" if yes, "reverse" if no) + mutate(startpoint = st_coordinates(st_startpoint(geom))) %>% + rowwise() %>% + mutate(direction = + ifelse(startpoint[[1]] == fromx & startpoint[[2]] == fromy, + "forward", + "reverse")) %>% + ungroup() + links_segmented <- links_to_segmentize %>% st_set_geometry(st_sfc(links_list_segmented)) %>% mutate(group_id=row_number()) %>% @@ -69,21 +85,46 @@ densifyNetwork <- function(networkList, minimum_length=400, densifyBikeways=F){ st_cast(to="LINESTRING") %>% mutate(new_node_id=row_number()+max(nodes_df$id,na.rm=T)) %>% group_by(group_id) %>% - mutate(from_id=ifelse(row_number()!=1,new_node_id-1,from_id)) %>% - mutate(to_id=ifelse(row_number()!=max(row_number()),new_node_id,to_id)) %>% + # mutate(from_id=ifelse(row_number()!=1,new_node_id-1,from_id)) %>% + mutate(from_id = case_when( + direction == "forward" & row_number() == 1 ~ from_id, + direction == "forward" & row_number() != 1 ~ new_node_id-1, + direction == "reverse" & row_number() != max(row_number()) ~ new_node_id, + direction == "reverse" & row_number() == max(row_number()) ~ from_id + )) %>% + # mutate(to_id=ifelse(row_number()!=max(row_number()),new_node_id,to_id)) %>% + mutate(to_id = case_when( + direction == "forward" & row_number() != max(row_number()) ~ new_node_id, + direction == "forward" & row_number() == max(row_number()) ~ to_id, + direction == "reverse" & row_number() == 1 ~ to_id, + direction == "reverse" & row_number() != 1 ~ new_node_id-1, + )) %>% mutate(length=round(as.numeric(st_length(geom)),3)) %>% - dplyr::select(-group_id,-new_node_id) + dplyr::select(-group_id, -new_node_id, -fromx, -fromy, -startpoint) - nodes_segmented <- links_segmented %>% + nodes_segmented_forward <- links_segmented %>% + filter(direction == "forward") %>% dplyr::select(id=to_id) %>% filter(id>max(nodes_df$id,na.rm=T)) %>% - st_set_geometry(st_endpoint(.)) %>% - 
mutate(is_roundabout=0,is_signal=0) + st_set_geometry(st_endpoint(.)) + nodes_segmented_reverse <- links_segmented %>% + filter(direction == "reverse") %>% + dplyr::select(id=to_id) %>% + filter(id>max(nodes_df$id,na.rm=T)) %>% + st_set_geometry(st_startpoint(.)) + + nodes_segmented <- rbind(nodes_segmented_forward, + nodes_segmented_reverse) %>% + mutate(is_roundabout=0,is_signal=0) + nodes_segmented <- bind_cols(nodes_segmented, data.frame(st_coordinates(nodes_segmented))) %>% dplyr::select(id,is_roundabout,is_signal,X,Y) + links_segmented <- links_segmented %>% + dplyr::select(-direction) + links_combined <- bind_rows( links_unsegmented, links_segmented %>% st_set_crs(st_crs(links_unsegmented)) From c99bea949c635e8662fd0096ab3ce651912f2fd7 Mon Sep 17 00:00:00 2001 From: StevePem Date: Sat, 5 Aug 2023 09:19:09 +1000 Subject: [PATCH 008/103] correctly recognise class of network --- functions/writeOutputs.R | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/functions/writeOutputs.R b/functions/writeOutputs.R index c2eba00..672a8d1 100644 --- a/functions/writeOutputs.R +++ b/functions/writeOutputs.R @@ -41,14 +41,14 @@ exportShp <- function(networkFinal, outputDir, outputCrs){ echo(paste0('Writing the ShapeFile output: ', nrow(networkFinal[[2]]), ' links and ', nrow(networkFinal[[1]]),' nodes\n')) - if(class(networkFinal[[1]])!="sf"){ + if(class(networkFinal[[1]])[1]!="sf"){ networkFinal[[1]] <- networkFinal[[1]] %>% mutate(GEOMETRY=paste0("POINT(",x," ",y,")")) %>% st_as_sf(wkt = "GEOMETRY", crs = outputCrs) %>% as.data.frame() %>% st_sf() } - if(class(networkFinal[[2]])!="sf"){ + if(class(networkFinal[[2]])[1]!="sf"){ networkFinal[[2]] <- networkFinal[[2]] %>% mutate(GEOMETRY=paste0("LINESTRING(",fromX," ",fromY,",",toX," ",toY,")")) %>% st_as_sf(wkt = "GEOMETRY", crs = outputCrs) %>% From d5a50055bfd1741a7089758fb529fa0b16a8a635 Mon Sep 17 00:00:00 2001 From: StevePem Date: Sat, 5 Aug 2023 11:11:38 +1000 Subject: [PATCH 009/103] fix writing vehicleTripMatching --- functions/gtfs2PtNetwork.R | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/functions/gtfs2PtNetwork.R b/functions/gtfs2PtNetwork.R index 65ea206..23be99c 100644 --- a/functions/gtfs2PtNetwork.R +++ b/functions/gtfs2PtNetwork.R @@ -503,12 +503,12 @@ exportGtfsSchedule <- function(links, str<-paste0(str," \n") } - if (i%%writeInterval==0 || i==nrow(vehicleTripMatching)) { + if (i%%writeInterval==0 || i==length(transitRoutes)) { cat(str,file=outxml,append=TRUE) str<-"" # clear the buffer after writing it out } # report progress - if (i%%50==0 || i==nrow(vehicleTripMatching)) printProgress(i,nrow(vehicleTripMatching),' vehicleTripMatching') + if (i%%50==0 || i==length(transitRoutes)) printProgress(i,length(transitRoutes),' vehicleTripMatching') } cat(paste0(" \n"),file=outxml,append=TRUE) cat(paste0("\n"),file=outxml,append=TRUE) From b661471d8d7fb79d187b768955d7ca8d00b9c05f Mon Sep 17 00:00:00 2001 From: StevePem Date: Thu, 10 Aug 2023 08:41:31 +1000 Subject: [PATCH 010/103] cycling file locations and flags --- NetworkGenerator.R | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/NetworkGenerator.R b/NetworkGenerator.R index d3ed555..4395e04 100644 --- a/NetworkGenerator.R +++ b/NetworkGenerator.R @@ -11,7 +11,7 @@ makeNetwork<-function(outputFileName="test"){ # Note that osm.pbf format is not yet supported osmExtract='./data/melbourne.osm' # If procesOsm=F, set the following to the network sqlite file - networkSqlite="data/network.sqlite" + 
networkSqlite="./data/melbourne_network_unconfigured.sqlite" # SIMPLIFICATION shortLinkLength=20 @@ -22,7 +22,7 @@ makeNetwork<-function(outputFileName="test"){ # DENSIFICATION desnificationMaxLengh=500 - densifyBikeways=F + densifyBikeways=T # CAPACITY ADJUSTMENT # A flag for whether to multiply capacity of links shorter than 100m by 2 or not @@ -31,11 +31,11 @@ makeNetwork<-function(outputFileName="test"){ # ELEVATION # A flag for whether to add elevation or not - addElevation=F + addElevation=T # Digital elevation model file - make sure it is in the same coordinate system as your network - demFile= 'data/DEMx10EPSG28355.tif' + demFile= "./data/DEM_melbourne.tif" # DEM's multiplier- set to 1 if DEM contains actual elevation - ElevationMultiplier=10 + ElevationMultiplier=1 # GTFS addGtfs=F From 51c8fc8777ab168a0794dc74c67c8487189e35e3 Mon Sep 17 00:00:00 2001 From: StevePem Date: Tue, 15 Aug 2023 11:45:11 +1000 Subject: [PATCH 011/103] LTS, LTS impedance, slope impedance --- CyclingImpedances.R | 74 ++++++++++++++ Destinations.R | 169 ++++++++++++++++++++++++++++++++ NetworkGenerator.R | 8 +- functions/addLTS.R | 152 ++++++++++++++++++++++++++++ functions/addSlopeImped.R | 26 +++++ functions/addTraffic.R | 46 +++++++++ functions/getDestinationTypes.R | 159 ++++++++++++++++++++++++++++++ functions/makeEdgesOneway.R | 70 +++++++++++++ 8 files changed, 703 insertions(+), 1 deletion(-) create mode 100644 CyclingImpedances.R create mode 100644 Destinations.R create mode 100644 functions/addLTS.R create mode 100644 functions/addSlopeImped.R create mode 100644 functions/addTraffic.R create mode 100644 functions/getDestinationTypes.R create mode 100644 functions/makeEdgesOneway.R diff --git a/CyclingImpedances.R b/CyclingImpedances.R new file mode 100644 index 0000000..e38e27d --- /dev/null +++ b/CyclingImpedances.R @@ -0,0 +1,74 @@ +# Using a network created by 'NetworkGenerator.R', add impedances for +# calculating cycling accessibility + +# Assumes input network is a one-way network that includes elevation, +# and that one-way daily traffic volumes are available + +addImpedances <- function() { + + # Parameters ----------------------------------------------------------------- + # Input network, to which cycling impedances are to be added, with layer names + # input.network <- "./output/test/network.sqlite" + input.network <- "./output/test/melbourne_network.sqlite" #<<< OLD VERSION FOR TESTING + input.node.layer <- "nodes" + input.link.layer <- "links" + + # Traffic file, with links layer name - file must match input.network, + # and with a 'total_vol' column containing 1-way daily traffic + # traffic.file <- "./output/test/network_traffic.sqlite" + traffic.file <- "./output/test/links_with_traffic.sqlite" #<<< OLD VERSION FOR TESTING + traffic.link.layer <- "cars_aht" + + # Traffic multiplier - where volumes are for a sample of traffic only (eg + # multiplier of 20 if the volumes are a 5% sample; use 1 if full volumes) + traffic.multiplier <- 10 + + # Output location - same directory as input.network + output.location <- paste0(path_dir(input.network), "/networkWeighted.sqlite") + + + # Packages and functions ----------------------------------------------------- + library(dplyr) + library(sf) + library(fs) + + dir_walk(path="./functions/",source, recurse=T, type = "file") + + + # Load input network and traffic file ---------------------------------------- + input.nodes <- st_read(input.network, layer = input.node.layer) + input.links <- st_read(input.network, layer = input.link.layer) + 
traffic.links <- st_read(traffic.file, layer = traffic.link.layer) + + + # Add LTS and its impedance -------------------------------------------------- + echo("Adding daily traffic volumes\n") + networkTraffic <- addTraffic(input.nodes, + input.links, + traffic.links, + traffic.multiplier) + ## TO DO - maybe traffic can just be joined on link_id? See whether traffic + ## file neatly uses the link_id's from the one-way input + + echo("Adding LTS and its impedance\n") + networkLTS <- addLTS(networkTraffic[[1]], networkTraffic[[2]]) + + + # Add slope impedance -------------------------------------------------------- + echo("Adding slope impedance") + networkSlope <- addSlopeImped(networkLTS[[1]], networkLTS[[2]]) + + + # Calculate total weight ----------------------------------------------------- + echo("Calculating cycling weight") + networkWeighted <- + list(networkSlope[[1]], + networkSlope[[2]] %>% + mutate(cycle.weight = length + LTS_imped + slope_imped)) + + + # write output --------------------------------------------------------------- + st_write(networkWeighted[[1]], "./output/test/networkWeighted.sqlite", layer = "nodes") + st_write(networkWeighted[[2]], "./output/test/networkWeighted.sqlite", layer = "links") + +} \ No newline at end of file diff --git a/Destinations.R b/Destinations.R new file mode 100644 index 0000000..d60c9d8 --- /dev/null +++ b/Destinations.R @@ -0,0 +1,169 @@ +# Make file of destinations required for accessibilty routing + +library(dplyr) +library(sf) +library(osmextract) + +# 1 Download OSM extract ---- +# -----------------------------------------------------------------------------# +# Download from https://www.interline.io/osm/extracts/ + +## Downloaded for Melbourne - melbourne_australia.osm.pbf + + +# 2 Converting to .gpkg format ---- +# -----------------------------------------------------------------------------# +# input file name and project CRS +INPUTFILE <- "./data/melbourne_australia.osm.pbf" + +PROJECT.CRS = 28355 + +# check layers +st_layers(INPUTFILE) + +# check keys +options(max.print = 2000) +polygon.tags <- oe_get_keys(INPUTFILE, layer = "multipolygons") %>% sort() +point.tags <- oe_get_keys(INPUTFILE, layer = "points") %>% sort() +line.tags <- oe_get_keys(INPUTFILE, layer = "lines") %>% sort() + +# create gpkg file in same directory as INPUTFILE, using the 'extra_tags' +# argument for specific extra tags required for various destination types +oe_vectortranslate(INPUTFILE, layer = "multipolygons", + extra_tags = c("access", "building", "grades", "healthcare", + "healthcare:speciality","isced:level", + "network", "operator", + "operator:type", "public_transport", "railway", + "school", "social_facility", "sport", + "tourism", "train")) +oe_vectortranslate(INPUTFILE, layer = "points", + extra_tags = c("access", "amenity", "building", "grades", + "healthcare", "healthcare:speciality", + "isced:level", "landuse", "leisure", + "network", "operator", + "operator:type", "public_transport", "railway", + "school", "shop", "social_facility", "sport", + "tourism", "train")) +oe_vectortranslate(INPUTFILE, layer = "lines", + extra_tags = c("access", "amenity", "building", "grades", + "healthcare", "healthcare:speciality", + "isced:level", "landuse", "leisure", + "network", "operator", + "operator:type", "public_transport", "railway", + "school", "shop", "social_facility", "sport", + "tourism", "train", + "smoothness", "surface")) +oe_vectortranslate(INPUTFILE, layer = "multilinestrings") +oe_vectortranslate(INPUTFILE, layer = "other_relations") + + +# 3 
Read in the .gpkg file ---- +# -----------------------------------------------------------------------------# +GPKG <- "./data/melbourne_australia.gpkg" + +polygons <- st_read(GPKG, layer = "multipolygons") %>% st_transform(PROJECT.CRS) +points <- st_read(GPKG, layer = "points") %>% st_transform(PROJECT.CRS) +lines <- st_read(GPKG, layer = "lines") %>% st_transform(PROJECT.CRS) +multilines <- st_read(GPKG, layer = "multilinestrings") %>% st_transform(PROJECT.CRS) +other_relations <- st_read(GPKG, layer = "other_relations") %>% st_transform(PROJECT.CRS) + + +# 4 Extract required destinations ---- +# -----------------------------------------------------------------------------# + +## 4.1 Tag combinations for feature types and network ---- +## ----------------------------------------------------------------------------# +# load functions for locating specific feature types +source("./functions/getDestinationTypes.R") + +# load network +NETWORK <- "./output/test/network.sqlite" #<<< CHECK FINAL NAME +NODE.LAYER <- "nodes" +LINK.LAYER <- "links" + +network.nodes <- st_read(NETWORK, layer = NODE.LAYER) +network.links <- st_read(NETWORK, layer = LINK.LAYER) + + +## 4.2 Compile point and polygon destinations ---- +## ----------------------------------------------------------------------------# +destination.layer <- function(layer) { + return( + bind_rows( + getPlayground(layer) %>% mutate(dest_type = "playground"), + getPark(layer) %>% mutate(dest_type = "park"), + getSport(layer) %>% mutate(dest_type = "sport"), + getKindergarten(layer) %>% mutate(dest_type = "kindergarten"), + getCommunity(layer) %>% mutate(dest_type = "community_centre"), + getLibrary(layer) %>% mutate(dest_type = "library"), + getPrimary(layer) %>% mutate(dest_type = "primary_school"), + getSecondary(layer) %>% mutate(dest_type = "secondary_school"), + getClinic(layer) %>% mutate(dest_type = "health_clinic"), + getDentist(layer) %>% mutate(dest_type = "dentist"), + getPharmacy(layer) %>% mutate(dest_type = "pharmacy"), + getConvenience(layer) %>% mutate(dest_type = "convenience_store"), + getSupermarket(layer) %>% mutate(dest_type = "supermarket"), + getShop(layer) %>% mutate(dest_type = "shop"), + getPost(layer) %>% mutate(dest_type = "post_office"), + getBank(layer) %>% mutate(dest_type = "bank"), + getRestaurant(layer) %>% mutate(dest_type = "restaurant"), + getCafe(layer) %>% mutate(dest_type = "cafe") + )) +} + +# create tables of destinations, and allocate unique id's (so features with +# multiple nodes can be grouped by the id where required) +destination.pt <- + bind_rows(destination.layer(points), + getStation() %>% mutate(dest_type = "railway_station")) %>% + mutate(dest_id = row_number()) + +destination.poly <- + destination.layer(polygons) %>% + mutate(dest_id = max(destination.pt$dest_id) + row_number()) + + +## 4.3 Find relevant nodes ---- +## ----------------------------------------------------------------------------# +# TO CONFIRM:- +# For all destinations except parks and schools ('small features'), relevant +# node is nearest node to point or to polygon centroid +# For parks and schools ('large features'): +# - points are buffered to 50m to create a polygon feature, +# - for buffered points and polygons, relevant nodes are all nodes within the +# feature and terminal nodes of links within 30m of boundary, or if none, +# then nearest node to boundary + +# Maybe this should be all nodes within 30m of buffered feature, and if link is within +# 30m of boundary but doesn't have a node within the buffer, then also 
its closest terminal +# node ??? + +dest.small <- bind_rows(destination.pt, + destination.poly %>% st_centroid()) %>% + filter(!(dest_type %in% c("park", "primary_school", "secondary_school"))) +near_node <- network.nodes$id[st_nearest_feature(dest.small, network.nodes)] +dest.small.with.nodes <- cbind(dest.small %>% st_drop_geometry(), near_node) + + +## NOTE - the code below is a simplified version which just finds nodes within +## feature or its 30m buffer, or nearest node if none - doesn't extend to terminal +## nodes of nearby features +dest.large <- bind_rows(destination.pt %>% st_buffer(50), + destination.poly) %>% + filter(dest_type %in% c("park", "primary_school", "secondary_school")) + +dest.large.found.nodes <- dest.large %>% + st_buffer(30) %>% + st_intersection(., network.nodes %>% dplyr::select(near_node = id)) + +dest.large.need.nodes <- dest.large %>% + filter(!(dest_id %in% dest.large.found.nodes$dest_id)) +near_node <- network.nodes$id[st_nearest_feature(dest.large.need.nodes, network.nodes)] + +dest.large.with.nodes <- bind_rows(dest.large.found.nodes %>% st_drop_geometry(), + cbind(dest.large.need.nodes %>% st_drop_geometry(), + near_node)) + +dest.with.nodes <- bind_rows(dest.small.with.nodes, + dest.large.with.nodes) + diff --git a/NetworkGenerator.R b/NetworkGenerator.R index 4395e04..0f25329 100644 --- a/NetworkGenerator.R +++ b/NetworkGenerator.R @@ -270,7 +270,13 @@ makeNetwork<-function(outputFileName="test"){ outputCrs=outputCrs)) } - networkFinal <- networkRestructured + # Make network oneway (required because cycling impedances such as level of + # traffic stress and slope may be different in each direction) + echo("Making all links one way\n") + networkOneway <- makeEdgesOneway(networkRestructured[[1]], + networkRestructured[[2]]) + + networkFinal <- networkOneway # writing outputs echo("========================================================\n") diff --git a/functions/addLTS.R b/functions/addLTS.R new file mode 100644 index 0000000..9b1baa3 --- /dev/null +++ b/functions/addLTS.R @@ -0,0 +1,152 @@ +# function to add level of traffic stress to network, based on highway class, +# traffic (ADT) and speed, and related impedance + +# traffic volumes (eg ADT 10000) are for two-way traffic, so are halved +# (eg 10000 / 2) in order to apply to the one-way links in edges_current + + + +addLTS <- function(nodes_current, edges_current) { + + # testing + # nodes_current <- networkTraffic[[1]] + # edges_current <- networkTraffic[[2]] + + # assign LTS to edges + # '1' to '4' are categories of increasing stress, as per table below] + + # road groups + local <- c("residential", "road", "unclassified", "living_street", "service") + tertiary <- c("tertiary", "tertiary_link") + secondary <- c("secondary", "secondary_link") + primary <- c("primary", "primary_link") + + + edges_current <- edges_current %>% + # make speed field (rounded, to avoid floating point issues) + mutate(speed = round(freespeed * 3.6)) %>% + # add LTS + mutate(lvl_traf_stress = case_when( + + # LTS 1 - off-road paths + cycleway %in% c("bikepath", "shared_path") ~ 1, + highway %in% c("cycleway", "track", "pedestrian", + "footway", "path", "corridor", "steps") ~ 1, + + # LTS 1 - separated cycle lanes + cycleway == "separated_lane" & speed <= 50 ~ 1, + + # LTS 1 - on-road cycle lanes + cycleway == "simple_lane" & + highway %in% c(local, tertiary, secondary) & + ADT <= 10000 /2 & speed <= 30 ~ 1, + + # LTS 1 - mixed traffic + highway %in% local & ADT <= 2000 / 2 & speed <= 30 ~ 1, + + # LTS 2 - separated cycle 
lanes
+    cycleway == "separated_lane" & speed <= 60 ~ 2,
+    
+    # LTS 2 - on-road cycle lanes
+    cycleway == "simple_lane" &
+      highway %in% c(local, tertiary, secondary) &
+      ADT <= 10000 / 2 & speed <= 50 ~ 2,
+    cycleway == "simple_lane" &
+      (highway %in% primary |
+         (highway %in% c(local, tertiary, secondary) &
+            ADT > 10000 / 2)) &
+      speed <= 40 ~ 2,
+    
+    # LTS 2 - mixed traffic
+    highway %in% local & ADT <= 750 / 2 & speed <= 50 ~ 2,
+    highway %in% local & ADT <= 2000 / 2 & speed <= 40 ~ 2,
+    highway %in% c(local, tertiary) & ADT <= 3000 / 2 & speed <= 30 ~ 2,
+    
+    # LTS 3 - on-road cycle lanes
+    cycleway == "simple_lane" & speed <= 60 ~ 3,
+    
+    # LTS 3 - mixed traffic
+    highway %in% local & ADT <= 750 / 2 & speed <= 60 ~ 3,
+    highway %in% c(local, tertiary) & ADT <= 3000 / 2 & speed <= 50 ~ 3,
+    (highway %in% c(secondary, primary) |
+       (highway %in% c(local, tertiary) & ADT > 3000 / 2)) &
+      speed <= 40 ~ 3,
+    
+    # LTS 4 - everything not covered above
+    TRUE ~ 4
+  ))
+  
+  # check to test how many in each category
+  # LTS_table <- edges_current %>%
+  #   st_drop_geometry() %>%
+  #   group_by(highway, lvl_traf_stress) %>%
+  #   summarise(n = n())
+  
+  # assign LTS to nodes, based on highest
+  # begin with all nodes (from and to) and the LTS level of the associated link
+  node_max_lookup <- rbind(edges_current %>%
+                             st_drop_geometry() %>%
+                             dplyr::select(id = from_id, LTS = lvl_traf_stress),
+                           edges_current %>%
+                             st_drop_geometry() %>%
+                             dplyr::select(id = to_id, LTS = lvl_traf_stress)) %>%
+    group_by(id) %>%
+    # find highest level of LTS for links connecting with the node
+    summarise(max_LTS = max(LTS)) %>%
+    ungroup()
+  
+  # Calculate impedance for intersection, and total impedance
+  
+  # Impedance for intersection applies to the to-node (the intersection
+  # that the link arrives at), and only if it's unsignalised
+  # penalty is calculated as:
+  #    penaltya = (Buffb - Buffa) * (IFb - 1)
+  # where
+  #    a is the link for which the penalty (penaltya) is being calculated
+  #    b is the highest-ranked other link at the relevant intersection
+  #    Buffa and Buffb are the buffer distances for a and b, where the buffer
+  #      distance is 0, 5, 10 or 25m for a link of LTS 1, 2, 3 or 4 respectively
+  #    IFb is the impedance factor for b, where the impedance factor is 1.00,
+  #      1.05, 1.10 or 1.15 for a link of LTS 1, 2, 3 or 4 respectively
+  
+  # LTS impedance, which is to be added to the length of the link and any other
+  # impedances (outside this function) to create the weight for the link,
+  # is the length-based impedance for the link plus its intersection impedance.
+  # - Length-based impedance is the link length multiplied by its impedance factor
+  #   minus 1 (that is, subtracting 1 so it is only the additional impedance,
+  #   not the length itself:
+  #      total_imped = length * (IFa - 1)
+  #   where IFa is the impedance factor for a, where the impedance factor is 1.00,
+  #   1.05, 1.10 or 1.15 for a link of LTS 1, 2, 3 or 4 respectively
+  # - Intersection impedance is calculated as above
+  
+  buff_imped_df <- data.frame(cbind(LTS = c(1, 2, 3, 4),
+                                    buffer = c(0, 5, 10, 25),
+                                    imped = c(1, 1.05, 1.10, 1.15)))
+  
+  edges_current <- edges_current %>%
+    # join node intersection details for the to-node
+    left_join(., nodes_current %>%
+                st_drop_geometry() %>%
+                dplyr::select(id, type),
+              by = c("to_id" = "id")) %>%
+    # join the node max LTS buffer & impedance details for the to-node
+    left_join(., node_max_lookup, by = c("to_id" = "id")) %>%
+    left_join(., buff_imped_df, by = c("lvl_traf_stress" = "LTS")) %>%
+    # and the buff_imped_df details for the max LTS
+    left_join(., buff_imped_df, by = c("max_LTS" = "LTS"), suffix = c(".a", ".b")) %>%
+    
+    # calculate intersection impedance, using formula above (unsignalised only)
+    mutate(intersec_imped = ifelse(type %in% c("simple_intersection",
+                                               "simple_roundabout"),
+                                   (buffer.b - buffer.a) * (imped.b - 1),
+                                   0)) %>%
+    # calculate total LTS impedance (to be added to length along with other impedances)
+    mutate(LTS_imped = (length * (imped.a - 1)) + intersec_imped) %>%
+    
+    # remove unwanted fields
+    dplyr::select(-speed, -type, -max_LTS, -buffer.a, -buffer.b,
+                  -imped.a, -imped.b, -intersec_imped)
+  
+  return(list(nodes_current, edges_current))
+}
diff --git a/functions/addSlopeImped.R b/functions/addSlopeImped.R
new file mode 100644
index 0000000..c5ee42e
--- /dev/null
+++ b/functions/addSlopeImped.R
@@ -0,0 +1,26 @@
+# function to add impedance to network links, based on slope
+
+# impedance is 50m of length per 1m of climb (Ziemke, D, Metzler, S & Nagel, K 2019,
+# 'Bicycle traffic and its interaction with motorized traffic in an agent-based
+# transport simulation framework', Future Generation Computer Systems, vol. 97, pp. 30-40.)
+
+addSlopeImped <- function(nodes_current, edges_current) {
+  
+  # testing
+  # nodes_current <- networkLTS[[1]]
+  # edges_current <- networkLTS[[2]]
+  
+  edges_current <- edges_current %>%
+    
+    # some coastal links are missing elevation; make slope 0
+    mutate(slope_pct = ifelse(is.na(slope_pct), 0, slope_pct)) %>%
+    
+    # 50m of length per 1m of climb
+    # climb = run * slope_pct / 100
+    mutate(slope_imped = case_when(
+      slope_pct <= 0 ~ 0,
+      slope_pct > 0  ~ (length * slope_pct / 100) * 50
+    ))
+  
+  return(list(nodes_current, edges_current))
+}
diff --git a/functions/addTraffic.R b/functions/addTraffic.R
new file mode 100644
index 0000000..22c4cc5
--- /dev/null
+++ b/functions/addTraffic.R
@@ -0,0 +1,46 @@
+# function to add traffic to network links
+
+# assumes a network links file, and a second file which is the same (or, at
+# least, contains the same links with the same 'from_id', 'to_id' and
+# highway type), and with a 'total_vol' column containing 1-way daily traffic
+
+# traffic volumes are for each link in its operating direction (so one way
+# traffic only); when comparing to two-way traffic volumes in 'addLTS.R', the
+# two-way volumes need to be halved
+
+# the multiplier is used where volumes are for a sample of traffic only
+# (eg use multiplier of 20 if the volumes are a 5% sample)
+
+addTraffic <- function(network.nodes, network.links, traffic.links, multiplier = 1) {
+  
+  # testing
+  # network.nodes = st_read("./output/test/melbourne_network.sqlite", layer = "nodes")
+  # network.links = st_read("./output/test/melbourne_network.sqlite", layer = "links")
+  # traffic.links = st_read("./output/test/links_with_traffic.sqlite", layer = "cars_aht")
+  # multiplier = 10
+  
+  # select traffic fields from traffic.links
+  traffic <- traffic.links %>%
+    dplyr::select(from_id, to_id, highway, length, total_vol) %>%
+    st_drop_geometry()
+  
+  # join traffic to links, in each direction using 'highway' and 'length' as
+  # additional join ids: simplification can result in two links sharing nodes (eg loop)
+  links.with.traffic <- network.links %>%
+    left_join(traffic, by = c("from_id", "to_id", "highway", "length")) %>%
+    # convert NAs to zeros, and apply multiplier
+    mutate(total_vol = if_else(is.na(total_vol), 0, total_vol)) %>%
+    mutate(total_vol = total_vol * multiplier) %>%
+    rename(ADT = total_vol) %>%
+    
+    # remove duplicates, if any, which have arisen because of original duplicate links,
+    # summing their ADTs: group, sum the ADTs, then remove the duplicate with 'distinct'
+    group_by(link_id) %>%
+    mutate(ADT = sum(ADT)) %>%
+    distinct() %>%
+    ungroup()
+  
+  # return network with traffic volumes added
+  return(list(network.nodes, links.with.traffic))
+  
+}
diff --git a/functions/getDestinationTypes.R b/functions/getDestinationTypes.R
new file mode 100644
index 0000000..9010e82
--- /dev/null
+++ b/functions/getDestinationTypes.R
@@ -0,0 +1,159 @@
+# functions to locate specific types of destinations
+
+## All tag combinations below can be applied to both points and polygons, except
+## railway stations which are a combination of points, polygons and lines, and
+## require aggregation within a boundary distance
+
+# 1 open space ----
+getPlayground <- function(layer) {
+  return(layer %>% filter(leisure == "playground"))
+}
+
+getPark <- function(layer) {
+  return(layer %>% filter(leisure == "park"))
+}
+
+
+# 2 sport ----
+getSport <- function(layer) {
+  return(layer %>% filter(!is.na(sport)))
+}
+
+
+# 3 lifelong learning ----
+getKindergarten <- function(layer) {
+  return(layer %>% filter(amenity
== "kindergarten" | school == "kindergarten")) +} + +getCommunity <- function(layer) { + return(layer %>% filter(amenity == "community_centre")) +} + +getLibrary <- function(layer) { + return(layer %>% filter(amenity == "library")) +} + + +# 4 schools ---- +getPrimary <- function(layer) { + return( + layer %>% + rowwise() %>% + mutate(lowest_grade = unlist(strsplit(grades, "-"))[1]) %>% + ungroup() %>% + filter(amenity == "school" & + (as.numeric(lowest_grade) < 7 | + lowest_grade %in% c("P", "K") | + school %in% c("primary", "primary;secondary") | + isced_level %in% c("0", "0-1", "0-2", "0-3", "0;1", "0;2", "0;3", + "1", "1-2", "1-3", "1;2", "1;3") | + grepl("Primary", name)) & + # omit other types such as special_education_needs, prison + (school %in% c("primary", "primary;secondary") | is.na(school))) + ) +} + +getSecondary <- function(layer) { + return( + layer %>% + rowwise() %>% + # some are eg "5-8; 10-12' or '0-4;9' - first, find the grade after the + # last hyphen, then find the grade after the last semi=colon + mutate(highest_grade = dplyr::last(unlist(strsplit(grades, "-")))) %>% + mutate(highest_grade = dplyr::last(unlist(strsplit(highest_grade, ";")))) %>% + ungroup() %>% + filter(amenity == "school" & + (as.numeric(highest_grade) >= 7 | + school %in% c("secondary", "primary;secondary") | + isced_level %in% c("0-2", "0-3", "0;2", "0;3", + "1-2", "1-3", "1;2", "1;3", + "2", "3", "2-3", "2;3") | + (grepl("Secondary", name) | grepl("High ", name))) & # space after "High" to avoid eg "Highview Primary" + # omit other types such as special_education_needs, prison + (school %in% c("secondary", "primary;secondary") | is.na(school))) + ) +} + + +# 5 health ---- +getClinic <- function(layer) { + return(layer %>% + filter(amenity %in% c("clinic", "doctor", "doctors") | + healthcare %in% c("clinic", "doctor", "doctors"))) +} + +getDentist <- function(layer) { + return(layer %>% filter(amenity == "dentist" | healthcare == "dentist")) +} + +getPharmacy <- function(layer) { + return(layer %>% + filter(amenity %in% c("chemist", "pharmacy") | + healthcare %in% c("chemist", "pharmacy") | + shop %in% c("chemist", "pharmacy"))) +} + + +# 6 shopping ---- +getConvenience <- function(layer) { + return(layer %>% filter(shop == "convenience")) +} + +getSupermarket <- function(layer) { + return(layer %>% filter(shop == "supermarket")) +} + +getShop <- function(layer) { + return(layer %>% filter(!(is.na(shop)))) +} + +getPost <- function(layer) { + return(layer %>% filter(amenity == "post_office")) +} + +getBank <- function(layer) { + return(layer %>% filter(amenity == "bank")) +} + + +# 7 eating ---- +getRestaurant <- function(layer) { + return(layer %>% filter(amenity == "restaurant")) +} + +getCafe <- function(layer) { + return(layer %>% filter(amenity == "cafe")) +} + +# 8 railway stations ---- +# Returns list of stations as points +# Note the buffer distance of 100m below; closest railway stations in Melbourne are +# Riversdale & Willison (about 420m) +getStation <- function() { + # general filter to find station objects + filterStation <- function(layer) { + return(layer %>% + filter((public_transport == "station" | public_transport == "stop_position") & + (railway == "station" | railway == "stop" | train == "yes" | + grepl("train", tolower(network)) | grepl("train", tolower(operator))) & + (is.na(tourism) | tourism != "yes") & + (is.na(railway) | railway != "construction"))) + } + + # find each object, and buffer to 100m + buff.dist <- 100 + station.pt <- filterStation(points) %>% 
st_buffer(buff.dist) + station.poly <- filterStation(polygons) %>% st_buffer(buff.dist) + station.line <- filterStation(lines) %>% st_buffer(buff.dist) + + # dissolve, then separate to individual polygons + stations <- bind_rows(station.pt, station.poly, station.line) %>% + st_union() %>% + st_as_sf() %>% + st_cast("POLYGON") %>% + st_centroid() %>% + # label geometry column + rename(geometry = x) + +} + diff --git a/functions/makeEdgesOneway.R b/functions/makeEdgesOneway.R new file mode 100644 index 0000000..66a2878 --- /dev/null +++ b/functions/makeEdgesOneway.R @@ -0,0 +1,70 @@ +# function to convert two-way edges to one-way + +makeEdgesOneway <- function(nodes_current, edges_current) { + + # testing + # nodes_current <- input.nodes + # edges_current <- input.links + + # ensure fromx, fromy, tox and toy column names are lower case (eg not 'fromX') + names.to.change <- c("fromX", "fromY", "toX", "toY") + edges_current <- rename_with(edges_current, tolower, any_of(names.to.change)) + + # select only two-way edges + edges_twoway <- edges_current %>% + filter(is_oneway == 0) + + # swap from/to details + edges_twoway_reversed <- edges_twoway %>% + # store original from/to details + mutate(orig_from_id = from_id, + orig_to_id = to_id, + orig_fromx = fromx, + orig_fromy = fromy, + orig_tox = tox, + orig_toy = toy) %>% + # swap from/to + mutate(from_id = orig_to_id, + to_id = orig_from_id, + fromx = orig_tox, + fromy = orig_toy, + tox = orig_fromx, + toy = orig_fromy) + + # if elevation is present, use the reverse slope + if("rvs_slope_pct" %in% colnames(edges_twoway_reversed)) { + edges_twoway_reversed <- edges_twoway_reversed %>% + mutate(slope_pct = rvs_slope_pct) + } + + # select required fields (excluding 'is_oneway') [note that "id" is not + # retained here - it is replaced by link_id] + required_fields <- c("from_id", "to_id", "fromx", "fromy", "tox", "toy", + "length", "freespeed", "permlanes", "capacity", "highway", + "cycleway", "surface", "is_cycle", "is_walk", "is_car", + "modes") + if ("slope_pct" %in% colnames(edges_twoway_reversed)) { + required_fields <- c(required_fields, "slope_pct") + } + edges_twoway_reversed <- edges_twoway_reversed %>% + dplyr::select(all_of(required_fields)) + + # modify original edges to rename fwd_slope_pct if present + if ("fwd_slope_pct" %in% colnames(edges_current)) { + edges_current <- edges_current %>% + rename(slope_pct = fwd_slope_pct) + } + + # exclude 'is_oneway' from original edges, and bind with reversed two-way edges + edges_current <- edges_current %>% + dplyr::select(all_of(required_fields)) %>% + rbind(., edges_twoway_reversed) + + # add link_id, based on rownumber (at the end, not beginning, because igraph + # requires from_id and to_id to be the first two columns) + edges_current <- edges_current %>% + mutate(link_id = row_number()) + + + return(list(nodes_current, edges_current)) +} From be526e916f2ec2697db9456cd1a7f17a5665ae57 Mon Sep 17 00:00:00 2001 From: StevePem Date: Wed, 16 Aug 2023 14:55:35 +1000 Subject: [PATCH 012/103] Add destinations to network --- Destinations.R | 169 -------------------------------- NetworkGenerator.R | 20 ++++ functions/addDestinations.R | 190 ++++++++++++++++++++++++++++++++++++ functions/writeOutputs.R | 10 ++ 4 files changed, 220 insertions(+), 169 deletions(-) delete mode 100644 Destinations.R create mode 100644 functions/addDestinations.R diff --git a/Destinations.R b/Destinations.R deleted file mode 100644 index d60c9d8..0000000 --- a/Destinations.R +++ /dev/null @@ -1,169 +0,0 @@ -# Make file of 
destinations required for accessibilty routing - -library(dplyr) -library(sf) -library(osmextract) - -# 1 Download OSM extract ---- -# -----------------------------------------------------------------------------# -# Download from https://www.interline.io/osm/extracts/ - -## Downloaded for Melbourne - melbourne_australia.osm.pbf - - -# 2 Converting to .gpkg format ---- -# -----------------------------------------------------------------------------# -# input file name and project CRS -INPUTFILE <- "./data/melbourne_australia.osm.pbf" - -PROJECT.CRS = 28355 - -# check layers -st_layers(INPUTFILE) - -# check keys -options(max.print = 2000) -polygon.tags <- oe_get_keys(INPUTFILE, layer = "multipolygons") %>% sort() -point.tags <- oe_get_keys(INPUTFILE, layer = "points") %>% sort() -line.tags <- oe_get_keys(INPUTFILE, layer = "lines") %>% sort() - -# create gpkg file in same directory as INPUTFILE, using the 'extra_tags' -# argument for specific extra tags required for various destination types -oe_vectortranslate(INPUTFILE, layer = "multipolygons", - extra_tags = c("access", "building", "grades", "healthcare", - "healthcare:speciality","isced:level", - "network", "operator", - "operator:type", "public_transport", "railway", - "school", "social_facility", "sport", - "tourism", "train")) -oe_vectortranslate(INPUTFILE, layer = "points", - extra_tags = c("access", "amenity", "building", "grades", - "healthcare", "healthcare:speciality", - "isced:level", "landuse", "leisure", - "network", "operator", - "operator:type", "public_transport", "railway", - "school", "shop", "social_facility", "sport", - "tourism", "train")) -oe_vectortranslate(INPUTFILE, layer = "lines", - extra_tags = c("access", "amenity", "building", "grades", - "healthcare", "healthcare:speciality", - "isced:level", "landuse", "leisure", - "network", "operator", - "operator:type", "public_transport", "railway", - "school", "shop", "social_facility", "sport", - "tourism", "train", - "smoothness", "surface")) -oe_vectortranslate(INPUTFILE, layer = "multilinestrings") -oe_vectortranslate(INPUTFILE, layer = "other_relations") - - -# 3 Read in the .gpkg file ---- -# -----------------------------------------------------------------------------# -GPKG <- "./data/melbourne_australia.gpkg" - -polygons <- st_read(GPKG, layer = "multipolygons") %>% st_transform(PROJECT.CRS) -points <- st_read(GPKG, layer = "points") %>% st_transform(PROJECT.CRS) -lines <- st_read(GPKG, layer = "lines") %>% st_transform(PROJECT.CRS) -multilines <- st_read(GPKG, layer = "multilinestrings") %>% st_transform(PROJECT.CRS) -other_relations <- st_read(GPKG, layer = "other_relations") %>% st_transform(PROJECT.CRS) - - -# 4 Extract required destinations ---- -# -----------------------------------------------------------------------------# - -## 4.1 Tag combinations for feature types and network ---- -## ----------------------------------------------------------------------------# -# load functions for locating specific feature types -source("./functions/getDestinationTypes.R") - -# load network -NETWORK <- "./output/test/network.sqlite" #<<< CHECK FINAL NAME -NODE.LAYER <- "nodes" -LINK.LAYER <- "links" - -network.nodes <- st_read(NETWORK, layer = NODE.LAYER) -network.links <- st_read(NETWORK, layer = LINK.LAYER) - - -## 4.2 Compile point and polygon destinations ---- -## ----------------------------------------------------------------------------# -destination.layer <- function(layer) { - return( - bind_rows( - getPlayground(layer) %>% mutate(dest_type = 
"playground"), - getPark(layer) %>% mutate(dest_type = "park"), - getSport(layer) %>% mutate(dest_type = "sport"), - getKindergarten(layer) %>% mutate(dest_type = "kindergarten"), - getCommunity(layer) %>% mutate(dest_type = "community_centre"), - getLibrary(layer) %>% mutate(dest_type = "library"), - getPrimary(layer) %>% mutate(dest_type = "primary_school"), - getSecondary(layer) %>% mutate(dest_type = "secondary_school"), - getClinic(layer) %>% mutate(dest_type = "health_clinic"), - getDentist(layer) %>% mutate(dest_type = "dentist"), - getPharmacy(layer) %>% mutate(dest_type = "pharmacy"), - getConvenience(layer) %>% mutate(dest_type = "convenience_store"), - getSupermarket(layer) %>% mutate(dest_type = "supermarket"), - getShop(layer) %>% mutate(dest_type = "shop"), - getPost(layer) %>% mutate(dest_type = "post_office"), - getBank(layer) %>% mutate(dest_type = "bank"), - getRestaurant(layer) %>% mutate(dest_type = "restaurant"), - getCafe(layer) %>% mutate(dest_type = "cafe") - )) -} - -# create tables of destinations, and allocate unique id's (so features with -# multiple nodes can be grouped by the id where required) -destination.pt <- - bind_rows(destination.layer(points), - getStation() %>% mutate(dest_type = "railway_station")) %>% - mutate(dest_id = row_number()) - -destination.poly <- - destination.layer(polygons) %>% - mutate(dest_id = max(destination.pt$dest_id) + row_number()) - - -## 4.3 Find relevant nodes ---- -## ----------------------------------------------------------------------------# -# TO CONFIRM:- -# For all destinations except parks and schools ('small features'), relevant -# node is nearest node to point or to polygon centroid -# For parks and schools ('large features'): -# - points are buffered to 50m to create a polygon feature, -# - for buffered points and polygons, relevant nodes are all nodes within the -# feature and terminal nodes of links within 30m of boundary, or if none, -# then nearest node to boundary - -# Maybe this should be all nodes within 30m of buffered feature, and if link is within -# 30m of boundary but doesn't have a node within the buffer, then also its closest terminal -# node ??? 
- -dest.small <- bind_rows(destination.pt, - destination.poly %>% st_centroid()) %>% - filter(!(dest_type %in% c("park", "primary_school", "secondary_school"))) -near_node <- network.nodes$id[st_nearest_feature(dest.small, network.nodes)] -dest.small.with.nodes <- cbind(dest.small %>% st_drop_geometry(), near_node) - - -## NOTE - the code below is a simplified version which just finds nodes within -## feature or its 30m buffer, or nearest node if none - doesn't extend to terminal -## nodes of nearby features -dest.large <- bind_rows(destination.pt %>% st_buffer(50), - destination.poly) %>% - filter(dest_type %in% c("park", "primary_school", "secondary_school")) - -dest.large.found.nodes <- dest.large %>% - st_buffer(30) %>% - st_intersection(., network.nodes %>% dplyr::select(near_node = id)) - -dest.large.need.nodes <- dest.large %>% - filter(!(dest_id %in% dest.large.found.nodes$dest_id)) -near_node <- network.nodes$id[st_nearest_feature(dest.large.need.nodes, network.nodes)] - -dest.large.with.nodes <- bind_rows(dest.large.found.nodes %>% st_drop_geometry(), - cbind(dest.large.need.nodes %>% st_drop_geometry(), - near_node)) - -dest.with.nodes <- bind_rows(dest.small.with.nodes, - dest.large.with.nodes) - diff --git a/NetworkGenerator.R b/NetworkGenerator.R index 0f25329..f8be832 100644 --- a/NetworkGenerator.R +++ b/NetworkGenerator.R @@ -36,6 +36,12 @@ makeNetwork<-function(outputFileName="test"){ demFile= "./data/DEM_melbourne.tif" # DEM's multiplier- set to 1 if DEM contains actual elevation ElevationMultiplier=1 + + # DESTINATIONS + # A flag for whether to add a destinations layer (drawn from OSM) or not + addDestinationLayer=T + # OSM extract for destinations, in .osm.pbf format + osmPbfExtract="./data/melbourne_australia.osm.pbf" # GTFS addGtfs=F @@ -67,6 +73,8 @@ makeNetwork<-function(outputFileName="test"){ library(tidytransit) library(hablar) library(hms) + library(osmextract) + library(tidyr) # Building the output folder structure ------------------------------------ @@ -220,6 +228,14 @@ makeNetwork<-function(outputFileName="test"){ networkDensified <- densifyNetwork(networkConnected,desnificationMaxLengh, densifyBikeways) + # adding destinations layer + if (addDestinationLayer) { + destinations <- addDestinations(networkDensified[[1]], + networkDensified[[2]], + osmPbfExtract, + outputCrs) + } + # simplify geometry so all edges are straight lines system.time(networkDirect <- makeEdgesDirect(networkDensified[[1]], @@ -278,6 +294,10 @@ makeNetwork<-function(outputFileName="test"){ networkFinal <- networkOneway + if (addDestinationLayer) { + networkFinal[[3]] <- destinations + } + # writing outputs echo("========================================================\n") echo("| **Launching Output Writing** |\n") diff --git a/functions/addDestinations.R b/functions/addDestinations.R new file mode 100644 index 0000000..a0abf38 --- /dev/null +++ b/functions/addDestinations.R @@ -0,0 +1,190 @@ +# function to create a destination layer to add to output network + +# assumes input file (OSMextract) is in .osm.pbf format, for example, +# as downloaded from https://www.interline.io/osm/extracts/ + +# uses functions for various destination types with tag combinations set out +# in 'getDestinationTypes.R' + +addDestinations <- function(nodes_current, + edges_current, + osmPbfExtract, + outputCRS) { + + # nodes_current = networkDensified[[1]] + # edges_current = networkDensified[[2]] + # osmPbfExtract = "./data/melbourne_australia.osm.pbf" + # outputCrs = 28355 + + # # check layers + # 
st_layers(osmPbfExtract)
+  # # only multipolygons, points and lines are required (not multilinestrings
+  # # or other_relations)
+
+  # # check keys
+  # options(max.print = 2000)
+  # polygon.tags <- oe_get_keys(osmPbfExtract, layer = "multipolygons") %>% sort()
+  # point.tags <- oe_get_keys(osmPbfExtract, layer = "points") %>% sort()
+  # line.tags <- oe_get_keys(osmPbfExtract, layer = "lines") %>% sort()
+
+  # reading layers ----
+  # ----------------------------------#
+  echo("Reading in the .osm.pbf extract layers\n")
+
+  # create gpkg file in same directory as osmPbfExtract, using the 'extra_tags'
+  # argument. Note:
+  # - the gpkg does not need to be retained permanently, but its creation is part
+  #   of the process of reading the layers; if already created, the reading
+  #   process will be quicker
+  # - for simplicity, the same extra tags are added for all layers, though
+  #   some don't exist for some layer types
+  extra.tags <- c("access", "amenity", "building", "grades", "healthcare",
+                  "healthcare:speciality", "isced:level", "landuse", "leisure",
+                  "network", "operator", "operator:type", "public_transport",
+                  "railway", "school", "shop", "social_facility", "sport",
+                  "tourism", "train")
+  # oe_vectortranslate(osmPbfExtract, layer = "multipolygons", extra_tags = extra.tags)
+  # oe_vectortranslate(osmPbfExtract, layer = "points", extra_tags = extra.tags)
+  # oe_vectortranslate(osmPbfExtract, layer = "lines", extra_tags = extra.tags)
+  #
+  # # read in the .gpkg file (same directory and name as .osm.pbf file, but .gpkg extension)
+  # gpkg <- paste0(path_dir(osmPbfExtract), "/",
+  #                gsub(".osm.pbf", ".gpkg", path_file(osmPbfExtract)))
+  # read in the layers
+  polygons <- oe_read(osmPbfExtract, layer = "multipolygons", extra_tags = extra.tags) %>%
+    st_transform(outputCrs)
+  points <- oe_read(osmPbfExtract, layer = "points", extra_tags = extra.tags) %>%
+    st_transform(outputCrs)
+  lines <- oe_read(osmPbfExtract, layer = "lines", extra_tags = extra.tags) %>%
+    st_transform(outputCrs)
+
+
+  # function to extract specific destination types from point or polygon layers ----
+  # ----------------------------------#
+  # all the tag combination functions in 'getDestinationTypes.R' apply to both
+  # points and polygons, except 'railway station', which is a combination of
+  # point, polygon and line features
+
+  destination.layer <- function(layer) {
+    return(
+      bind_rows(
+        getPlayground(layer) %>% mutate(dest_type = "playground"),
+        getPark(layer) %>% mutate(dest_type = "park"),
+        getSport(layer) %>% mutate(dest_type = "sport"),
+        getKindergarten(layer) %>% mutate(dest_type = "kindergarten"),
+        getCommunity(layer) %>% mutate(dest_type = "community_centre"),
+        getLibrary(layer) %>% mutate(dest_type = "library"),
+        getPrimary(layer) %>% mutate(dest_type = "primary_school"),
+        getSecondary(layer) %>% mutate(dest_type = "secondary_school"),
+        getClinic(layer) %>% mutate(dest_type = "health_clinic"),
+        getDentist(layer) %>% mutate(dest_type = "dentist"),
+        getPharmacy(layer) %>% mutate(dest_type = "pharmacy"),
+        getConvenience(layer) %>% mutate(dest_type = "convenience_store"),
+        getSupermarket(layer) %>% mutate(dest_type = "supermarket"),
+        getShop(layer) %>% mutate(dest_type = "shop"),
+        getPost(layer) %>% mutate(dest_type = "post_office"),
+        getBank(layer) %>% mutate(dest_type = "bank"),
+        getRestaurant(layer) %>% mutate(dest_type = "restaurant"),
+        getCafe(layer) %>% mutate(dest_type = "cafe")
+      ))
+  }
+
+  # create tables of point and polygon destinations ----
+  # ----------------------------------#
+  
echo("Finding destinations and their nearby nodes\n") + + # create tables for points and polygons, and allocate unique id's (so features + # multiple multiple nodes can be grouped by the id where required) + destination.pt <- + bind_rows(destination.layer(points), + # add stations (from point, polygons and lines) to point table + getStation() %>% mutate(dest_type = "railway_station")) %>% + mutate(dest_id = row_number()) + + destination.poly <- + destination.layer(polygons) %>% + mutate(dest_id = max(destination.pt$dest_id) + row_number()) + + # # check numbers of each destination type + # chk <- full_join(destination.poly %>% + # st_drop_geometry() %>% + # group_by(dest_type) %>% + # summarise(poly = n()), + # destination.pt %>% + # st_drop_geometry() %>% + # group_by(dest_type) %>% + # summarise(pt = n()), + # by = "dest_type") + + + # find relevant nodes ---- + # For all destinations except parks and schools ('small features'), relevant + # node is nearest node to point or to polygon centroid + # For parks and schools ('large features'): + # - points are buffered to 50m to create a polygon feature, + # - for buffered points and polygons, relevant nodes are all nodes within the + # feature and terminal nodes of links within 30m of boundary, or if none, + # then nearest node to boundary + # In each case, nodes/links must be cyclable + + cyclable.links <- edges_current %>% + filter(is_cycle == 1) + cyclable.nodes <- nodes_current %>% + filter(id %in% cyclable.links$from_id | id %in% cyclable.links$to_id) + + # 'small' destinations + dest.small <- bind_rows(destination.pt, + destination.poly %>% st_centroid()) %>% + filter(!(dest_type %in% c("park", "primary_school", "secondary_school"))) + near_node <- cyclable.nodes$id[st_nearest_feature(dest.small, cyclable.nodes)] + dest.small.with.nodes <- cbind(dest.small %>% st_drop_geometry(), near_node) + + + # 'large' destinations + dest.large <- bind_rows(destination.pt %>% st_buffer(50), + destination.poly) %>% + filter(dest_type %in% c("park", "primary_school", "secondary_school")) + + # # - nodes within the feature + # dest.large.nodes.within <- dest.large %>% + # st_intersection(., cyclable.nodes %>% dplyr::select(near_node = id)) %>% + # st_drop_geometry() + + # - terminal nodes of links within feature buffered to 30m (will include any + # nodes within feature itself, as their links will fall within the buffered + # feature) + dest.large.found.nodes <- dest.large %>% + st_buffer(30) %>% + st_intersection(., cyclable.links %>% dplyr::select(from_id, to_id)) %>% + st_drop_geometry() %>% + pivot_longer(cols = c("from_id", "to_id"), + names_to = NULL, + values_to = "near_node") %>% + distinct() + + # - nearest node if none within and no links within 30m + dest.large.other <- dest.large %>% + filter(!(dest_id %in% dest.large.found.nodes$dest_id)) + near_node <- cyclable.nodes$id[st_nearest_feature(dest.large.other, cyclable.nodes)] + dest.large.other.nodes <- cbind(dest.large.other %>% st_drop_geometry(), near_node) + + # combine the large destinations + dest.large.with.nodes <- bind_rows(dest.large.found.nodes, + dest.large.other.nodes) + + + # combine all destinations for output ---- + dest.with.nodes <- bind_rows(dest.small.with.nodes, + dest.large.with.nodes) %>% + relocate(dest_id) %>% + relocate(dest_type, .after = dest_id) %>% + relocate(near_node, .after = dest_type) %>% + relocate(other_tags, .after = last_col()) %>% + + # and join nodes for locations + left_join(., nodes_current %>% dplyr::select(id), by = c("near_node" = "id")) + + 
return(dest.with.nodes)
+
+}
+
diff --git a/functions/writeOutputs.R b/functions/writeOutputs.R
index 672a8d1..e82d7e5 100644
--- a/functions/writeOutputs.R
+++ b/functions/writeOutputs.R
@@ -30,6 +30,11 @@ exportSQlite <- function(networkFinal, outputDir, outputCrs){
   st_write(networkFinal[[1]], paste0(outputDir,'/network.sqlite'),
            layer = 'nodes', driver = 'SQLite', layer_options = 'GEOMETRY=AS_XY',
            delete_layer = T)
+  if (length(networkFinal) > 2) {
+    st_write(networkFinal[[3]], paste0(outputDir,'/network.sqlite'),
+             layer = 'destinations', driver = 'SQLite',
+             layer_options = 'GEOMETRY=AS_XY', delete_layer = T)
+  }
   
   echo(paste0('Finished generating the sqlite output\n'))
 }
@@ -64,6 +69,11 @@ exportShp <- function(networkFinal, outputDir, outputCrs){
   st_write(networkFinal[[1]], paste0(shpDir,'/nodes.shp'),
            driver = "ESRI Shapefile", layer_options = 'GEOMETRY=AS_XY',
            delete_layer = T)
+  if (length(networkFinal) > 2) {
+    st_write(networkFinal[[3]], paste0(shpDir,'/destinations.shp'),
+             driver = "ESRI Shapefile", layer_options = 'GEOMETRY=AS_XY',
+             delete_layer = T)
+  }
   
   echo(paste0('Finished generating the ShapeFile output\n'))
   

From 418bba6a7b8a197ef7fa5cba5ceeee897ce73729 Mon Sep 17 00:00:00 2001
From: jafshin
Date: Wed, 13 Sep 2023 18:14:03 +0200
Subject: [PATCH 013/103] adding osmextract to renv and updating packages

---
 renv.lock       | 563 +++++++++++++++++++++++++++++++-----------------
 renv/activate.R | 453 +++++++++++++++++++++++++++-----------
 2 files changed, 692 insertions(+), 324 deletions(-)

diff --git a/renv.lock b/renv.lock
index c4ec6e2..5b8047f 100644
--- a/renv.lock
+++ b/renv.lock
@@ -1,6 +1,6 @@
 {
   "R": {
-    "Version": "4.2.1",
+    "Version": "4.1.2",
     "Repositories": [
       {
         "Name": "CRAN",
@@ -14,369 +14,448 @@
       "Version": "1.1.3",
       "Source": "Repository",
       "Repository": "CRAN",
-      "Hash": "b2866e62bab9378c3cc9476a1954226b",
-      "Requirements": []
+      "Requirements": [
+        "R",
+        "methods"
+      ],
+      "Hash": "b2866e62bab9378c3cc9476a1954226b"
     },
     "KernSmooth": {
       "Package": "KernSmooth",
-      "Version": "2.23-20",
+      "Version": "2.23-22",
       "Source": "Repository",
       "Repository": "CRAN",
-      "Hash": "8dcfa99b14c296bc9f1fd64d52fd3ce7",
-      "Requirements": []
+      "Requirements": [
+        "R",
+        "stats"
+      ],
+      "Hash": "2fecebc3047322fa5930f74fae5de70f"
     },
     "MASS": {
       "Package": "MASS",
-      "Version": "7.3-58.2",
+      "Version": "7.3-60",
       "Source": "Repository",
       "Repository": "CRAN",
-      "Hash": "e02d1a0f6122fd3e634b25b433704344",
-      "Requirements": []
+      "Requirements": [
+        "R",
+        "grDevices",
+        "graphics",
+        "methods",
+        "stats",
+        "utils"
+      ],
+      "Hash": "a56a6365b3fa73293ea8d084be0d9bb0"
     },
     "Matrix": {
       "Package": "Matrix",
-      "Version": "1.4-1",
+      "Version": "1.6-1",
       "Source": "Repository",
       "Repository": "CRAN",
-      "Hash": "699c47c606293bdfbc9fd78a93c9c8fe",
       "Requirements": [
-        "lattice"
-      ]
+        "R",
+        "grDevices",
+        "graphics",
+        "grid",
+        "lattice",
+        "methods",
+        "stats",
+        "utils"
+      ],
+      "Hash": "cb6855ac711958ca734b75e631b2035d"
     },
     "R6": {
       "Package": "R6",
       "Version": "2.5.1",
       "Source": "Repository",
       "Repository": "CRAN",
-      "Hash": "470851b6d5d0ac559e9d01bb352b4021",
-      "Requirements": []
+      "Requirements": [
+        "R"
+      ],
+      "Hash": "470851b6d5d0ac559e9d01bb352b4021"
     },
     "Rcpp": {
       "Package": "Rcpp",
-      "Version": "1.0.10",
+      "Version": "1.0.11",
       "Source": "Repository",
       "Repository": "CRAN",
-      "Hash": "e749cae40fa9ef469b6050959517453c",
-      "Requirements": []
+      "Requirements": [
+        "methods",
+        "utils"
+      ],
+      "Hash": "ae6cbbe1492f4de79c45fce06f967ce8"
    },
    "askpass": {
      "Package": "askpass",
-      "Version": "1.1",
+      "Version": "1.2.0",
      "Source": 
"Repository", "Repository": "CRAN", - "Hash": "e8a22846fff485f0be3770c2da758713", "Requirements": [ "sys" - ] - }, - "backports": { - "Package": "backports", - "Version": "1.4.1", - "Source": "Repository", - "Repository": "CRAN", - "Hash": "c39fbec8a30d23e721980b8afb31984c", - "Requirements": [] - }, - "checkmate": { - "Package": "checkmate", - "Version": "2.1.0", - "Source": "Repository", - "Repository": "CRAN", - "Hash": "147e4db6909d8814bb30f671b49d7e06", - "Requirements": [ - "backports" - ] + ], + "Hash": "cad6cf7f1d5f6e906700b9d3e718c796" }, "class": { "Package": "class", - "Version": "7.3-21", + "Version": "7.3-22", "Source": "Repository", "Repository": "CRAN", - "Hash": "8ae0d4328e2eb3a582dfd5391a3663b7", "Requirements": [ - "MASS" - ] + "MASS", + "R", + "stats", + "utils" + ], + "Hash": "f91f6b29f38b8c280f2b9477787d4bb2" }, "classInt": { "Package": "classInt", - "Version": "0.4-8", + "Version": "0.4-10", "Source": "Repository", "Repository": "CRAN", - "Hash": "298fa500d773db0845935cd73bfd9c2e", "Requirements": [ "KernSmooth", + "R", "class", - "e1071" - ] + "e1071", + "grDevices", + "graphics", + "stats" + ], + "Hash": "f5a40793b1ae463a7ffb3902a95bf864" }, "cli": { "Package": "cli", - "Version": "3.6.0", + "Version": "3.6.1", "Source": "Repository", "Repository": "CRAN", - "Hash": "3177a5a16c243adc199ba33117bd9657", - "Requirements": [] + "Requirements": [ + "R", + "utils" + ], + "Hash": "89e6d8219950eac806ae0c489052048a" }, "cpp11": { "Package": "cpp11", - "Version": "0.4.3", + "Version": "0.4.6", "Source": "Repository", "Repository": "CRAN", - "Hash": "ed588261931ee3be2c700d22e94a29ab", - "Requirements": [] + "Requirements": [ + "R" + ], + "Hash": "707fae4bbf73697ec8d85f9d7076c061" }, "curl": { "Package": "curl", - "Version": "5.0.0", + "Version": "5.0.2", "Source": "Repository", "Repository": "CRAN", - "Hash": "e4f97056611e8e6b8b852d13b7400cf1", - "Requirements": [] + "Requirements": [ + "R" + ], + "Hash": "511bacbfa153a15251166b463b4da4f9" }, "data.table": { "Package": "data.table", - "Version": "1.14.6", + "Version": "1.14.8", "Source": "Repository", "Repository": "CRAN", - "Hash": "aecef50008ea7b57c76f1cb5c127fb02", - "Requirements": [] + "Requirements": [ + "R", + "methods" + ], + "Hash": "b4c06e554f33344e044ccd7fdca750a9" }, "digest": { "Package": "digest", - "Version": "0.6.31", + "Version": "0.6.33", "Source": "Repository", "Repository": "CRAN", - "Hash": "8b708f296afd9ae69f450f9640be8990", - "Requirements": [] + "Requirements": [ + "R", + "utils" + ], + "Hash": "b18a9cf3c003977b0cc49d5e76ebe48d" }, "dplyr": { "Package": "dplyr", - "Version": "1.1.0", + "Version": "1.1.3", "Source": "Repository", "Repository": "CRAN", - "Hash": "d3c34618017e7ae252d46d79a1b9ec32", "Requirements": [ + "R", "R6", "cli", "generics", "glue", "lifecycle", "magrittr", + "methods", "pillar", "rlang", "tibble", "tidyselect", + "utils", "vctrs" - ] + ], + "Hash": "e85ffbebaad5f70e1a2e2ef4302b4949" }, "e1071": { "Package": "e1071", - "Version": "1.7-12", + "Version": "1.7-13", "Source": "Repository", "Repository": "CRAN", - "Hash": "0d776df7577206e100c2c4b508208b10", "Requirements": [ "class", - "proxy" - ] - }, - "ellipsis": { - "Package": "ellipsis", - "Version": "0.3.2", - "Source": "Repository", - "Repository": "CRAN", - "Hash": "bb0eec2fe32e88d9e2836c2f73ea2077", - "Requirements": [ - "rlang" - ] + "grDevices", + "graphics", + "methods", + "proxy", + "stats", + "utils" + ], + "Hash": "1046cb48d06cb40c2900d8878f03a0fe" }, "fansi": { "Package": "fansi", "Version": "1.0.4", "Source": 
"Repository", "Repository": "CRAN", - "Hash": "1d9e7ad3c8312a192dea7d3db0274fde", - "Requirements": [] + "Requirements": [ + "R", + "grDevices", + "utils" + ], + "Hash": "1d9e7ad3c8312a192dea7d3db0274fde" }, "fs": { "Package": "fs", - "Version": "1.6.0", + "Version": "1.6.3", "Source": "Repository", "Repository": "CRAN", - "Hash": "0120e8c933bace1141e0b0d376b0c010", - "Requirements": [] + "Requirements": [ + "R", + "methods" + ], + "Hash": "47b5f30c720c23999b913a1a635cf0bb" }, "generics": { "Package": "generics", "Version": "0.1.3", "Source": "Repository", "Repository": "CRAN", - "Hash": "15e9634c0fcd294799e9b2e929ed1b86", - "Requirements": [] + "Requirements": [ + "R", + "methods" + ], + "Hash": "15e9634c0fcd294799e9b2e929ed1b86" }, "geodist": { "Package": "geodist", "Version": "0.0.8", "Source": "Repository", "Repository": "CRAN", - "Hash": "be85dc7cec76dbd9a9478724ba518146", - "Requirements": [] + "Hash": "be85dc7cec76dbd9a9478724ba518146" }, "glue": { "Package": "glue", "Version": "1.6.2", "Source": "Repository", "Repository": "CRAN", - "Hash": "4f2596dfb05dac67b9dc558e5c6fba2e", - "Requirements": [] + "Requirements": [ + "R", + "methods" + ], + "Hash": "4f2596dfb05dac67b9dc558e5c6fba2e" }, "gtfsio": { "Package": "gtfsio", - "Version": "1.0.0", + "Version": "1.1.0", "Source": "Repository", "Repository": "CRAN", - "Hash": "dd7277616c17c88d4061d84f80c1d78a", "Requirements": [ "data.table", + "utils", "zip" - ] + ], + "Hash": "d02f8456a746098e0456cfd98db473c5" }, "hablar": { "Package": "hablar", - "Version": "0.3.1", + "Version": "0.3.2", "Source": "Repository", "Repository": "CRAN", - "Hash": "2330dfd884d745b504b21f72c84c5ac5", "Requirements": [ "dplyr", "lubridate", "purrr" - ] + ], + "Hash": "c7a6a49207405553fd26eff865d94360" }, "hms": { "Package": "hms", - "Version": "1.1.2", + "Version": "1.1.3", "Source": "Repository", "Repository": "CRAN", - "Hash": "41100392191e1244b887878b533eea91", "Requirements": [ - "ellipsis", "lifecycle", + "methods", "pkgconfig", "rlang", "vctrs" - ] + ], + "Hash": "b59377caa7ed00fa41808342002138f9" }, "httr": { "Package": "httr", - "Version": "1.4.4", + "Version": "1.4.7", "Source": "Repository", "Repository": "CRAN", - "Hash": "57557fac46471f0dbbf44705cc6a5c8c", "Requirements": [ + "R", "R6", "curl", "jsonlite", "mime", "openssl" - ] + ], + "Hash": "ac107251d9d9fd72f0ca8049988f1d7f" }, "igraph": { "Package": "igraph", - "Version": "1.3.5", + "Version": "1.5.1", "Source": "Repository", "Repository": "CRAN", - "Hash": "132b06d7060f11ba8b4c7e7f385e9b7a", "Requirements": [ "Matrix", + "R", + "cli", + "cpp11", + "grDevices", + "graphics", + "lifecycle", "magrittr", + "methods", "pkgconfig", - "rlang" - ] + "rlang", + "stats", + "utils" + ], + "Hash": "80401cb5ec513e8ddc56764d03f63669" }, "jsonlite": { "Package": "jsonlite", - "Version": "1.8.4", + "Version": "1.8.7", "Source": "Repository", "Repository": "CRAN", - "Hash": "a4269a09a9b865579b2635c77e572374", - "Requirements": [] + "Requirements": [ + "methods" + ], + "Hash": "266a20443ca13c65688b2116d5220f76" }, "lattice": { "Package": "lattice", - "Version": "0.20-45", + "Version": "0.21-8", "Source": "Repository", "Repository": "CRAN", - "Hash": "b64cdbb2b340437c4ee047a1f4c4377b", - "Requirements": [] + "Requirements": [ + "R", + "grDevices", + "graphics", + "grid", + "stats", + "utils" + ], + "Hash": "0b8a6d63c8770f02a8b5635f3c431e6b" }, "lifecycle": { "Package": "lifecycle", "Version": "1.0.3", "Source": "Repository", "Repository": "CRAN", - "Hash": "001cecbeac1cff9301bdc3775ee46a86", "Requirements": [ + 
"R", "cli", "glue", "rlang" - ] + ], + "Hash": "001cecbeac1cff9301bdc3775ee46a86" }, "lubridate": { "Package": "lubridate", - "Version": "1.9.1", + "Version": "1.9.2", "Source": "Repository", "Repository": "CRAN", - "Hash": "88ad585eb49669b7f2db3f5ef3c8307d", "Requirements": [ + "R", "generics", + "methods", "timechange" - ] + ], + "Hash": "e25f18436e3efd42c7c590a1c4c15390" }, "lwgeom": { "Package": "lwgeom", - "Version": "0.2-11", + "Version": "0.2-13", "Source": "Repository", "Repository": "CRAN", - "Hash": "14cf492ea07bf498fb34c80563ba593b", "Requirements": [ + "R", "Rcpp", "sf", "units" - ] + ], + "Hash": "9804362cc0267990ac61a85edeca73ed" }, "magrittr": { "Package": "magrittr", "Version": "2.0.3", "Source": "Repository", "Repository": "CRAN", - "Hash": "7ce2733a9826b3aeb1775d56fd305472", - "Requirements": [] + "Requirements": [ + "R" + ], + "Hash": "7ce2733a9826b3aeb1775d56fd305472" }, "mime": { "Package": "mime", "Version": "0.12", "Source": "Repository", "Repository": "CRAN", - "Hash": "18e9c28c1d3ca1560ce30658b22ce104", - "Requirements": [] + "Requirements": [ + "tools" + ], + "Hash": "18e9c28c1d3ca1560ce30658b22ce104" }, "openssl": { "Package": "openssl", - "Version": "2.0.5", + "Version": "2.1.0", "Source": "Repository", "Repository": "CRAN", - "Hash": "b04c27110bf367b4daa93f34f3d58e75", "Requirements": [ "askpass" - ] + ], + "Hash": "273a6bb4a9844c296a459d2176673270" + }, + "osmextract": { + "Package": "osmextract", + "Version": "0.5.0", + "Source": "Repository", + "Repository": "CRAN", + "Requirements": [ + "R", + "httr", + "jsonlite", + "sf", + "tools", + "utils" + ], + "Hash": "325765c4927551138994b1722d6094fc" }, "pillar": { "Package": "pillar", - "Version": "1.8.1", + "Version": "1.9.0", "Source": "Repository", "Repository": "CRAN", - "Hash": "f2316df30902c81729ae9de95ad5a608", "Requirements": [ "cli", "fansi", @@ -384,128 +463,171 @@ "lifecycle", "rlang", "utf8", + "utils", "vctrs" - ] + ], + "Hash": "15da5a8412f317beeee6175fbc76f4bb" }, "pkgconfig": { "Package": "pkgconfig", "Version": "2.0.3", "Source": "Repository", "Repository": "CRAN", - "Hash": "01f28d4278f15c76cddbea05899c5d6f", - "Requirements": [] + "Requirements": [ + "utils" + ], + "Hash": "01f28d4278f15c76cddbea05899c5d6f" }, "proxy": { "Package": "proxy", "Version": "0.4-27", "Source": "Repository", "Repository": "CRAN", - "Hash": "e0ef355c12942cf7a6b91a6cfaea8b3e", - "Requirements": [] + "Requirements": [ + "R", + "stats", + "utils" + ], + "Hash": "e0ef355c12942cf7a6b91a6cfaea8b3e" }, "purrr": { "Package": "purrr", - "Version": "1.0.1", + "Version": "1.0.2", "Source": "Repository", "Repository": "CRAN", - "Hash": "d71c815267c640f17ddbf7f16144b4bb", "Requirements": [ + "R", "cli", "lifecycle", "magrittr", "rlang", "vctrs" - ] + ], + "Hash": "1cba04a4e9414bdefc9dcaa99649a8dc" }, "raster": { "Package": "raster", - "Version": "3.6-14", + "Version": "3.6-23", "Source": "Repository", "Repository": "CRAN", - "Hash": "2406e2380237f4dcf84e83260419895a", "Requirements": [ + "R", "Rcpp", + "methods", "sp", "terra" - ] + ], + "Hash": "337d6d70f7d6bf78df236a5a53f09db0" }, "renv": { "Package": "renv", - "Version": "0.16.0", + "Version": "1.0.2", "Source": "Repository", "Repository": "CRAN", - "Hash": "c9e8442ab69bc21c9697ecf856c1e6c7", - "Requirements": [] + "Requirements": [ + "utils" + ], + "Hash": "4b22ac016fe54028b88d0c68badbd061" }, "rgdal": { "Package": "rgdal", - "Version": "1.6-4", + "Version": "1.6-7", "Source": "Repository", "Repository": "CRAN", - "Hash": "92183bff0bac3a711fde35a22c1bf45b", "Requirements": [ - 
"sp" - ] + "R", + "grDevices", + "graphics", + "methods", + "sp", + "stats", + "utils" + ], + "Hash": "10b777236c9e7855bc9dea8e347e30b7" }, "rlang": { "Package": "rlang", - "Version": "1.0.6", + "Version": "1.1.1", "Source": "Repository", "Repository": "CRAN", - "Hash": "4ed1f8336c8d52c3e750adcdc57228a7", - "Requirements": [] + "Requirements": [ + "R", + "utils" + ], + "Hash": "a85c767b55f0bf9b7ad16c6d7baee5bb" }, "s2": { "Package": "s2", - "Version": "1.1.2", + "Version": "1.1.4", "Source": "Repository", "Repository": "CRAN", - "Hash": "5cca323babe990f99d5bc3402f64b905", "Requirements": [ + "R", "Rcpp", "wk" - ] + ], + "Hash": "f1cbe03bb3346f8e817518ffa20f9f5a" }, "sf": { "Package": "sf", - "Version": "1.0-9", + "Version": "1.0-14", "Source": "Repository", "Repository": "CRAN", - "Hash": "8a348490fefb623e7935f405230900a7", "Requirements": [ "DBI", + "R", "Rcpp", "classInt", + "grDevices", + "graphics", + "grid", "magrittr", + "methods", "s2", - "units" - ] + "stats", + "tools", + "units", + "utils" + ], + "Hash": "e2111252a76984ca50bf8d6314348681" }, "sp": { "Package": "sp", - "Version": "1.6-0", + "Version": "2.0-0", "Source": "Repository", "Repository": "CRAN", - "Hash": "6674e075a078d9c3bde8ba800367347c", "Requirements": [ - "lattice" - ] + "R", + "grDevices", + "graphics", + "grid", + "lattice", + "methods", + "stats", + "utils" + ], + "Hash": "2551981e6f85d59c81652bf654d6c3ca" }, "stringi": { "Package": "stringi", "Version": "1.7.12", "Source": "Repository", "Repository": "CRAN", - "Hash": "ca8bd84263c77310739d2cf64d84d7c9", - "Requirements": [] + "Requirements": [ + "R", + "stats", + "tools", + "utils" + ], + "Hash": "ca8bd84263c77310739d2cf64d84d7c9" }, "stringr": { "Package": "stringr", "Version": "1.5.0", "Source": "Repository", "Repository": "CRAN", - "Hash": "671a4d384ae9d32fc47a14e98bfa3dc8", "Requirements": [ + "R", "cli", "glue", "lifecycle", @@ -513,140 +635,179 @@ "rlang", "stringi", "vctrs" - ] + ], + "Hash": "671a4d384ae9d32fc47a14e98bfa3dc8" }, "sys": { "Package": "sys", - "Version": "3.4.1", + "Version": "3.4.2", "Source": "Repository", "Repository": "CRAN", - "Hash": "34c16f1ef796057bfa06d3f4ff818a5d", - "Requirements": [] + "Hash": "3a1be13d68d47a8cd0bfd74739ca1555" }, "terra": { "Package": "terra", - "Version": "1.7-3", + "Version": "1.7-46", "Source": "Repository", "Repository": "CRAN", - "Hash": "230c4bfbb6aae92d92ef9762b683541e", "Requirements": [ - "Rcpp" - ] + "R", + "Rcpp", + "methods" + ], + "Hash": "d15182a8a20ffc5880e721bfa1bf4ce9" }, "tibble": { "Package": "tibble", - "Version": "3.1.8", + "Version": "3.2.1", "Source": "Repository", "Repository": "CRAN", - "Hash": "56b6934ef0f8c68225949a8672fe1a8f", "Requirements": [ + "R", "fansi", "lifecycle", "magrittr", + "methods", "pillar", "pkgconfig", "rlang", + "utils", "vctrs" - ] + ], + "Hash": "a84e2cc86d07289b3b6f5069df7a004c" + }, + "tidyr": { + "Package": "tidyr", + "Version": "1.3.0", + "Source": "Repository", + "Repository": "CRAN", + "Requirements": [ + "R", + "cli", + "cpp11", + "dplyr", + "glue", + "lifecycle", + "magrittr", + "purrr", + "rlang", + "stringr", + "tibble", + "tidyselect", + "utils", + "vctrs" + ], + "Hash": "e47debdc7ce599b070c8e78e8ac0cfcf" }, "tidyselect": { "Package": "tidyselect", "Version": "1.2.0", "Source": "Repository", "Repository": "CRAN", - "Hash": "79540e5fcd9e0435af547d885f184fd5", "Requirements": [ + "R", "cli", "glue", "lifecycle", "rlang", "vctrs", "withr" - ] + ], + "Hash": "79540e5fcd9e0435af547d885f184fd5" }, "tidytransit": { "Package": "tidytransit", - "Version": 
"1.4", + "Version": "1.6.0", "Source": "Repository", "Repository": "CRAN", - "Hash": "9e685f7c56c2e2c2f92438f4ac6b1644", "Requirements": [ - "checkmate", + "R", "data.table", "digest", "dplyr", "geodist", "gtfsio", "hms", - "httr", "rlang", "sf" - ] + ], + "Hash": "a9255650349cc3438c03020e509c1074" }, "timechange": { "Package": "timechange", "Version": "0.2.0", "Source": "Repository", "Repository": "CRAN", - "Hash": "8548b44f79a35ba1791308b61e6012d7", "Requirements": [ + "R", "cpp11" - ] + ], + "Hash": "8548b44f79a35ba1791308b61e6012d7" }, "units": { "Package": "units", - "Version": "0.8-1", + "Version": "0.8-3", "Source": "Repository", "Repository": "CRAN", - "Hash": "81433967f9b34a120a4f5a5a016cd5ed", "Requirements": [ + "R", "Rcpp" - ] + ], + "Hash": "880ebc99e4d8f7e5f3caeb2f12632583" }, "utf8": { "Package": "utf8", "Version": "1.2.3", "Source": "Repository", "Repository": "CRAN", - "Hash": "1fe17157424bb09c48a8b3b550c753bc", - "Requirements": [] + "Requirements": [ + "R" + ], + "Hash": "1fe17157424bb09c48a8b3b550c753bc" }, "vctrs": { "Package": "vctrs", - "Version": "0.5.2", + "Version": "0.6.3", "Source": "Repository", "Repository": "CRAN", - "Hash": "e4ffa94ceed5f124d429a5a5f0f5b378", "Requirements": [ + "R", "cli", "glue", "lifecycle", "rlang" - ] + ], + "Hash": "d0ef2856b83dc33ea6e255caf6229ee2" }, "withr": { "Package": "withr", "Version": "2.5.0", "Source": "Repository", "Repository": "CRAN", - "Hash": "c0e49a9760983e81e55cdd9be92e7182", - "Requirements": [] + "Requirements": [ + "R", + "grDevices", + "graphics", + "stats" + ], + "Hash": "c0e49a9760983e81e55cdd9be92e7182" }, "wk": { "Package": "wk", - "Version": "0.7.1", + "Version": "0.8.0", "Source": "Repository", "Repository": "CRAN", - "Hash": "141385279f2cd7faa6a3eccd8d1279dd", - "Requirements": [] + "Requirements": [ + "R" + ], + "Hash": "aaf7e20556e3125a09d53453814ad339" }, "zip": { "Package": "zip", - "Version": "2.2.2", + "Version": "2.3.0", "Source": "Repository", "Repository": "CRAN", - "Hash": "c42bfcec3fa6a0cce17ce1f8bc684f88", - "Requirements": [] + "Hash": "d98c94dacb7e0efcf83b0a133a705504" } } } diff --git a/renv/activate.R b/renv/activate.R index 019b5a6..2969c73 100644 --- a/renv/activate.R +++ b/renv/activate.R @@ -2,11 +2,27 @@ local({ # the requested version of renv - version <- "0.16.0" + version <- "1.0.2" + attr(version, "sha") <- NULL # the project directory project <- getwd() + # use start-up diagnostics if enabled + diagnostics <- Sys.getenv("RENV_STARTUP_DIAGNOSTICS", unset = "FALSE") + if (diagnostics) { + start <- Sys.time() + profile <- tempfile("renv-startup-", fileext = ".Rprof") + utils::Rprof(profile) + on.exit({ + utils::Rprof(NULL) + elapsed <- signif(difftime(Sys.time(), start, units = "auto"), digits = 2L) + writeLines(sprintf("- renv took %s to run the autoloader.", format(elapsed))) + writeLines(sprintf("- Profile: %s", profile)) + print(utils::summaryRprof(profile)) + }, add = TRUE) + } + # figure out whether the autoloader is enabled enabled <- local({ @@ -60,21 +76,75 @@ local({ # load bootstrap tools `%||%` <- function(x, y) { - if (is.environment(x) || length(x)) x else y + if (is.null(x)) y else x + } + + catf <- function(fmt, ..., appendLF = TRUE) { + + quiet <- getOption("renv.bootstrap.quiet", default = FALSE) + if (quiet) + return(invisible()) + + msg <- sprintf(fmt, ...) 
+ cat(msg, file = stdout(), sep = if (appendLF) "\n" else "") + + invisible(msg) + + } + + header <- function(label, + ..., + prefix = "#", + suffix = "-", + n = min(getOption("width"), 78)) + { + label <- sprintf(label, ...) + n <- max(n - nchar(label) - nchar(prefix) - 2L, 8L) + if (n <= 0) + return(paste(prefix, label)) + + tail <- paste(rep.int(suffix, n), collapse = "") + paste0(prefix, " ", label, " ", tail) + + } + + startswith <- function(string, prefix) { + substring(string, 1, nchar(prefix)) == prefix } bootstrap <- function(version, library) { + friendly <- renv_bootstrap_version_friendly(version) + section <- header(sprintf("Bootstrapping renv %s", friendly)) + catf(section) + # attempt to download renv - tarball <- tryCatch(renv_bootstrap_download(version), error = identity) - if (inherits(tarball, "error")) - stop("failed to download renv ", version) + catf("- Downloading renv ... ", appendLF = FALSE) + withCallingHandlers( + tarball <- renv_bootstrap_download(version), + error = function(err) { + catf("FAILED") + stop("failed to download:\n", conditionMessage(err)) + } + ) + catf("OK") + on.exit(unlink(tarball), add = TRUE) # now attempt to install - status <- tryCatch(renv_bootstrap_install(version, tarball, library), error = identity) - if (inherits(status, "error")) - stop("failed to install renv ", version) + catf("- Installing renv ... ", appendLF = FALSE) + withCallingHandlers( + status <- renv_bootstrap_install(version, tarball, library), + error = function(err) { + catf("FAILED") + stop("failed to install:\n", conditionMessage(err)) + } + ) + catf("OK") + + # add empty line to break up bootstrapping from normal output + catf("") + return(invisible()) } renv_bootstrap_tests_running <- function() { @@ -83,28 +153,32 @@ local({ renv_bootstrap_repos <- function() { + # get CRAN repository + cran <- getOption("renv.repos.cran", "https://cloud.r-project.org") + # check for repos override repos <- Sys.getenv("RENV_CONFIG_REPOS_OVERRIDE", unset = NA) - if (!is.na(repos)) + if (!is.na(repos)) { + + # check for RSPM; if set, use a fallback repository for renv + rspm <- Sys.getenv("RSPM", unset = NA) + if (identical(rspm, repos)) + repos <- c(RSPM = rspm, CRAN = cran) + return(repos) + } + # check for lockfile repositories repos <- tryCatch(renv_bootstrap_repos_lockfile(), error = identity) if (!inherits(repos, "error") && length(repos)) return(repos) - # if we're testing, re-use the test repositories - if (renv_bootstrap_tests_running()) - return(getOption("renv.tests.repos")) - # retrieve current repos repos <- getOption("repos") # ensure @CRAN@ entries are resolved - repos[repos == "@CRAN@"] <- getOption( - "renv.repos.cran", - "https://cloud.r-project.org" - ) + repos[repos == "@CRAN@"] <- cran # add in renv.bootstrap.repos if set default <- c(FALLBACK = "https://cloud.r-project.org") @@ -143,33 +217,34 @@ local({ renv_bootstrap_download <- function(version) { - # if the renv version number has 4 components, assume it must - # be retrieved via github - nv <- numeric_version(version) - components <- unclass(nv)[[1]] - - # if this appears to be a development version of 'renv', we'll - # try to restore from github - dev <- length(components) == 4L - - # begin collecting different methods for finding renv - methods <- c( - renv_bootstrap_download_tarball, - if (dev) - renv_bootstrap_download_github - else c( - renv_bootstrap_download_cran_latest, - renv_bootstrap_download_cran_archive + sha <- attr(version, "sha", exact = TRUE) + + methods <- if (!is.null(sha)) { + + # 
attempting to bootstrap a development version of renv + c( + function() renv_bootstrap_download_tarball(sha), + function() renv_bootstrap_download_github(sha) ) - ) + + } else { + + # attempting to bootstrap a release version of renv + c( + function() renv_bootstrap_download_tarball(version), + function() renv_bootstrap_download_cran_latest(version), + function() renv_bootstrap_download_cran_archive(version) + ) + + } for (method in methods) { - path <- tryCatch(method(version), error = identity) + path <- tryCatch(method(), error = identity) if (is.character(path) && file.exists(path)) return(path) } - stop("failed to download renv ", version) + stop("All download methods failed") } @@ -233,8 +308,6 @@ local({ type <- spec$type repos <- spec$repos - message("* Downloading renv ", version, " ... ", appendLF = FALSE) - baseurl <- utils::contrib.url(repos = repos, type = type) ext <- if (identical(type, "source")) ".tar.gz" @@ -251,13 +324,10 @@ local({ condition = identity ) - if (inherits(status, "condition")) { - message("FAILED") + if (inherits(status, "condition")) return(FALSE) - } # report success and return - message("OK (downloaded ", type, ")") destfile } @@ -314,8 +384,6 @@ local({ urls <- file.path(repos, "src/contrib/Archive/renv", name) destfile <- file.path(tempdir(), name) - message("* Downloading renv ", version, " ... ", appendLF = FALSE) - for (url in urls) { status <- tryCatch( @@ -323,14 +391,11 @@ local({ condition = identity ) - if (identical(status, 0L)) { - message("OK") + if (identical(status, 0L)) return(destfile) - } } - message("FAILED") return(FALSE) } @@ -344,8 +409,7 @@ local({ return() # allow directories - info <- file.info(tarball, extra_cols = FALSE) - if (identical(info$isdir, TRUE)) { + if (dir.exists(tarball)) { name <- sprintf("renv_%s.tar.gz", version) tarball <- file.path(tarball, name) } @@ -354,7 +418,7 @@ local({ if (!file.exists(tarball)) { # let the user know we weren't able to honour their request - fmt <- "* RENV_BOOTSTRAP_TARBALL is set (%s) but does not exist." + fmt <- "- RENV_BOOTSTRAP_TARBALL is set (%s) but does not exist." msg <- sprintf(fmt, tarball) warning(msg) @@ -363,10 +427,7 @@ local({ } - fmt <- "* Bootstrapping with tarball at path '%s'." - msg <- sprintf(fmt, tarball) - message(msg) - + catf("- Using local tarball '%s'.", tarball) tarball } @@ -393,8 +454,6 @@ local({ on.exit(do.call(base::options, saved), add = TRUE) } - message("* Downloading renv ", version, " from GitHub ... ", appendLF = FALSE) - url <- file.path("https://api.github.com/repos/rstudio/renv/tarball", version) name <- sprintf("renv_%s.tar.gz", version) destfile <- file.path(tempdir(), name) @@ -404,26 +463,105 @@ local({ condition = identity ) - if (!identical(status, 0L)) { - message("FAILED") + if (!identical(status, 0L)) return(FALSE) - } - message("OK") + renv_bootstrap_download_augment(destfile) + return(destfile) } + # Add Sha to DESCRIPTION. This is stop gap until #890, after which we + # can use renv::install() to fully capture metadata. 
+ renv_bootstrap_download_augment <- function(destfile) { + sha <- renv_bootstrap_git_extract_sha1_tar(destfile) + if (is.null(sha)) { + return() + } + + # Untar + tempdir <- tempfile("renv-github-") + on.exit(unlink(tempdir, recursive = TRUE), add = TRUE) + untar(destfile, exdir = tempdir) + pkgdir <- dir(tempdir, full.names = TRUE)[[1]] + + # Modify description + desc_path <- file.path(pkgdir, "DESCRIPTION") + desc_lines <- readLines(desc_path) + remotes_fields <- c( + "RemoteType: github", + "RemoteHost: api.github.com", + "RemoteRepo: renv", + "RemoteUsername: rstudio", + "RemotePkgRef: rstudio/renv", + paste("RemoteRef: ", sha), + paste("RemoteSha: ", sha) + ) + writeLines(c(desc_lines[desc_lines != ""], remotes_fields), con = desc_path) + + # Re-tar + local({ + old <- setwd(tempdir) + on.exit(setwd(old), add = TRUE) + + tar(destfile, compression = "gzip") + }) + invisible() + } + + # Extract the commit hash from a git archive. Git archives include the SHA1 + # hash as the comment field of the tarball pax extended header + # (see https://www.kernel.org/pub/software/scm/git/docs/git-archive.html) + # For GitHub archives this should be the first header after the default one + # (512 byte) header. + renv_bootstrap_git_extract_sha1_tar <- function(bundle) { + + # open the bundle for reading + # We use gzcon for everything because (from ?gzcon) + # > Reading from a connection which does not supply a 'gzip' magic + # > header is equivalent to reading from the original connection + conn <- gzcon(file(bundle, open = "rb", raw = TRUE)) + on.exit(close(conn)) + + # The default pax header is 512 bytes long and the first pax extended header + # with the comment should be 51 bytes long + # `52 comment=` (11 chars) + 40 byte SHA1 hash + len <- 0x200 + 0x33 + res <- rawToChar(readBin(conn, "raw", n = len)[0x201:len]) + + if (grepl("^52 comment=", res)) { + sub("52 comment=", "", res) + } else { + NULL + } + } + renv_bootstrap_install <- function(version, tarball, library) { # attempt to install it into project library - message("* Installing renv ", version, " ... 
", appendLF = FALSE) dir.create(library, showWarnings = FALSE, recursive = TRUE) + output <- renv_bootstrap_install_impl(library, tarball) + + # check for successful install + status <- attr(output, "status") + if (is.null(status) || identical(status, 0L)) + return(status) + + # an error occurred; report it + header <- "installation of renv failed" + lines <- paste(rep.int("=", nchar(header)), collapse = "") + text <- paste(c(header, lines, output), collapse = "\n") + stop(text) + + } + + renv_bootstrap_install_impl <- function(library, tarball) { # invoke using system2 so we can capture and report output bin <- R.home("bin") exe <- if (Sys.info()[["sysname"]] == "Windows") "R.exe" else "R" - r <- file.path(bin, exe) + R <- file.path(bin, exe) args <- c( "--vanilla", "CMD", "INSTALL", "--no-multiarch", @@ -431,19 +569,7 @@ local({ shQuote(path.expand(tarball)) ) - output <- system2(r, args, stdout = TRUE, stderr = TRUE) - message("Done!") - - # check for successful install - status <- attr(output, "status") - if (is.numeric(status) && !identical(status, 0L)) { - header <- "Error installing renv:" - lines <- paste(rep.int("=", nchar(header)), collapse = "") - text <- c(header, lines, output) - writeLines(text, con = stderr()) - } - - status + system2(R, args, stdout = TRUE, stderr = TRUE) } @@ -653,34 +779,62 @@ local({ } - renv_bootstrap_validate_version <- function(version) { + renv_bootstrap_validate_version <- function(version, description = NULL) { - loadedversion <- utils::packageDescription("renv", fields = "Version") - if (version == loadedversion) - return(TRUE) + # resolve description file + # + # avoid passing lib.loc to `packageDescription()` below, since R will + # use the loaded version of the package by default anyhow. note that + # this function should only be called after 'renv' is loaded + # https://github.com/rstudio/renv/issues/1625 + description <- description %||% packageDescription("renv") - # assume four-component versions are from GitHub; three-component - # versions are from CRAN - components <- strsplit(loadedversion, "[.-]")[[1]] - remote <- if (length(components) == 4L) - paste("rstudio/renv", loadedversion, sep = "@") + # check whether requested version 'version' matches loaded version of renv + sha <- attr(version, "sha", exact = TRUE) + valid <- if (!is.null(sha)) + renv_bootstrap_validate_version_dev(sha, description) else - paste("renv", loadedversion, sep = "@") + renv_bootstrap_validate_version_release(version, description) + + if (valid) + return(TRUE) + + # the loaded version of renv doesn't match the requested version; + # give the user instructions on how to proceed + remote <- if (!is.null(description[["RemoteSha"]])) { + paste("rstudio/renv", description[["RemoteSha"]], sep = "@") + } else { + paste("renv", description[["Version"]], sep = "@") + } + + # display both loaded version + sha if available + friendly <- renv_bootstrap_version_friendly( + version = description[["Version"]], + sha = description[["RemoteSha"]] + ) fmt <- paste( "renv %1$s was loaded from project library, but this project is configured to use renv %2$s.", - "Use `renv::record(\"%3$s\")` to record renv %1$s in the lockfile.", - "Use `renv::restore(packages = \"renv\")` to install renv %2$s into the project library.", + "- Use `renv::record(\"%3$s\")` to record renv %1$s in the lockfile.", + "- Use `renv::restore(packages = \"renv\")` to install renv %2$s into the project library.", sep = "\n" ) - - msg <- sprintf(fmt, loadedversion, version, remote) - warning(msg, call. 
= FALSE) + catf(fmt, friendly, renv_bootstrap_version_friendly(version), remote) FALSE } + renv_bootstrap_validate_version_dev <- function(version, description) { + expected <- description[["RemoteSha"]] + is.character(expected) && startswith(expected, version) + } + + renv_bootstrap_validate_version_release <- function(version, description) { + expected <- description[["Version"]] + is.character(expected) && identical(expected, version) + } + renv_bootstrap_hash_text <- function(text) { hashfile <- tempfile("renv-hash-") @@ -700,6 +854,12 @@ local({ # warn if the version of renv loaded does not match renv_bootstrap_validate_version(version) + # execute renv load hooks, if any + hooks <- getHook("renv::autoload") + for (hook in hooks) + if (is.function(hook)) + tryCatch(hook(), error = warnify) + # load the project renv::load(project) @@ -839,14 +999,79 @@ local({ } + renv_bootstrap_version_friendly <- function(version, shafmt = NULL, sha = NULL) { + sha <- sha %||% attr(version, "sha", exact = TRUE) + parts <- c(version, sprintf(shafmt %||% " [sha: %s]", substring(sha, 1L, 7L))) + paste(parts, collapse = "") + } + + renv_bootstrap_exec <- function(project, libpath, version) { + if (!renv_bootstrap_load(project, libpath, version)) + renv_bootstrap_run(version, libpath) + } + + renv_bootstrap_run <- function(version, libpath) { + + # perform bootstrap + bootstrap(version, libpath) + + # exit early if we're just testing bootstrap + if (!is.na(Sys.getenv("RENV_BOOTSTRAP_INSTALL_ONLY", unset = NA))) + return(TRUE) + + # try again to load + if (requireNamespace("renv", lib.loc = libpath, quietly = TRUE)) { + return(renv::load(project = getwd())) + } + + # failed to download or load renv; warn the user + msg <- c( + "Failed to find an renv installation: the project will not be loaded.", + "Use `renv::activate()` to re-initialize the project." + ) + + warning(paste(msg, collapse = "\n"), call. 
= FALSE) + + } + + + renv_bootstrap_in_rstudio <- function() { + commandArgs()[[1]] == "RStudio" + } + + # Used to work around buglet in RStudio if hook uses readline + renv_bootstrap_flush_console <- function() { + tryCatch({ + tools <- as.environment("tools:rstudio") + tools$.rs.api.sendToConsole("", echo = FALSE, focus = FALSE) + }, error = function(cnd) {}) + } renv_json_read <- function(file = NULL, text = NULL) { + jlerr <- NULL + # if jsonlite is loaded, use that instead - if ("jsonlite" %in% loadedNamespaces()) - renv_json_read_jsonlite(file, text) + if ("jsonlite" %in% loadedNamespaces()) { + + json <- catch(renv_json_read_jsonlite(file, text)) + if (!inherits(json, "error")) + return(json) + + jlerr <- json + + } + + # otherwise, fall back to the default JSON reader + json <- catch(renv_json_read_default(file, text)) + if (!inherits(json, "error")) + return(json) + + # report an error + if (!is.null(jlerr)) + stop(jlerr) else - renv_json_read_default(file, text) + stop(json) } @@ -960,35 +1185,17 @@ local({ # construct full libpath libpath <- file.path(root, prefix) - # attempt to load - if (renv_bootstrap_load(project, libpath, version)) - return(TRUE) - - # load failed; inform user we're about to bootstrap - prefix <- paste("# Bootstrapping renv", version) - postfix <- paste(rep.int("-", 77L - nchar(prefix)), collapse = "") - header <- paste(prefix, postfix) - message(header) - - # perform bootstrap - bootstrap(version, libpath) - - # exit early if we're just testing bootstrap - if (!is.na(Sys.getenv("RENV_BOOTSTRAP_INSTALL_ONLY", unset = NA))) - return(TRUE) - - # try again to load - if (requireNamespace("renv", lib.loc = libpath, quietly = TRUE)) { - message("* Successfully installed and loaded renv ", version, ".") - return(renv::load()) + if (renv_bootstrap_in_rstudio()) { + # RStudio only updates console once .Rprofile is finished, so + # instead run code on sessionInit + setHook("rstudio.sessionInit", function(...) { + renv_bootstrap_exec(project, libpath, version) + renv_bootstrap_flush_console() + }) + } else { + renv_bootstrap_exec(project, libpath, version) } - # failed to download or load renv; warn the user - msg <- c( - "Failed to find an renv installation: the project will not be loaded.", - "Use `renv::activate()` to re-initialize the project." - ) - - warning(paste(msg, collapse = "\n"), call. 
= FALSE) + invisible() }) From 2b87f27a92bca64c8cd80b9c1faa26ebd967c59f Mon Sep 17 00:00:00 2001 From: jafshin Date: Thu, 14 Sep 2023 13:14:18 +0200 Subject: [PATCH 014/103] should not remove links with parking info - might be on street parking --- processOSM.sh | 2 -- 1 file changed, 2 deletions(-) diff --git a/processOSM.sh b/processOSM.sh index 083854b..b5f0f4c 100755 --- a/processOSM.sh +++ b/processOSM.sh @@ -30,7 +30,6 @@ ogr2ogr -update -overwrite -nln roads -f "SQLite" -dsco SPATIALITE=YES \ (other_tags IS NULL OR (other_tags NOT LIKE '%busbar%' AND \ other_tags NOT LIKE '%abandoned%' AND \ - other_tags NOT LIKE '%parking%' AND \ other_tags NOT LIKE '%\"access\"=>\"private\"%')) " \ ./data/temp.sqlite $extract # highway NOT LIKE '%service%' AND \ @@ -56,7 +55,6 @@ ogr2ogr -update -overwrite -nln pt -f "SQLite" -dialect SQLite -sql \ WHERE other_tags LIKE '%railway%' AND \ other_tags NOT LIKE '%busbar%' AND \ other_tags NOT LIKE '%abandoned%' AND \ - other_tags NOT LIKE '%parking%' AND \ other_tags NOT LIKE '%miniature%' AND \ other_tags NOT LIKE '%proposed%' AND \ other_tags NOT LIKE '%disused%' AND \ From 8f1154569613e5b978d4b4b27c5561eafdaa1460 Mon Sep 17 00:00:00 2001 From: StevePem Date: Fri, 15 Sep 2023 11:15:04 +1000 Subject: [PATCH 015/103] surface impedance --- CyclingImpedances.R | 15 ++++++--- functions/addSurfImped.R | 71 ++++++++++++++++++++++++++++++++++++++++ 2 files changed, 82 insertions(+), 4 deletions(-) create mode 100644 functions/addSurfImped.R diff --git a/CyclingImpedances.R b/CyclingImpedances.R index e38e27d..e0888fb 100644 --- a/CyclingImpedances.R +++ b/CyclingImpedances.R @@ -31,6 +31,8 @@ addImpedances <- function() { library(dplyr) library(sf) library(fs) + library(osmextract) + library(stringr) dir_walk(path="./functions/",source, recurse=T, type = "file") @@ -48,7 +50,7 @@ addImpedances <- function() { traffic.links, traffic.multiplier) ## TO DO - maybe traffic can just be joined on link_id? 
See whether traffic - ## file neatly uses the link_id's from the one-way input + ## file neatly uses the link_id's from the one-way input (also in cycling-adoption) echo("Adding LTS and its impedance\n") networkLTS <- addLTS(networkTraffic[[1]], networkTraffic[[2]]) @@ -59,12 +61,17 @@ addImpedances <- function() { networkSlope <- addSlopeImped(networkLTS[[1]], networkLTS[[2]]) + # Add surface impedance -------------------------------------------------------- + echo("Adding surface impedance") + networkSurf <- addSurfImped(networkSlope[[1]], networkSlope[[2]]) + + # Calculate total weight ----------------------------------------------------- echo("Calculating cycling weight") networkWeighted <- - list(networkSlope[[1]], - networkSlope[[2]] %>% - mutate(cycle.weight = length + LTS_imped + slope_imped)) + list(networkSurf[[1]], + networkSurf[[2]] %>% + mutate(cycle.weight = length + LTS_imped + slope_imped + surf_imped)) # write output --------------------------------------------------------------- diff --git a/functions/addSurfImped.R b/functions/addSurfImped.R new file mode 100644 index 0000000..96f23ba --- /dev/null +++ b/functions/addSurfImped.R @@ -0,0 +1,71 @@ +# function to add impedance to network links, based on surface +addSurfImped <- function(nodes_current, edges_current) { + + edges_current <- edges_current %>% + + # add surface level, based on OSM surface categories + # [CONSIDER REVISING THIS LIST WHEN USING IN OTHER CITIES - SEE + # 'checkSurfTypes()' BELOW FOR A FUNCTION TO HELP INVESTIGATE] + mutate(surf_lvl = case_when( + str_detect(tolower(surface), + "bluestone|boardwalk|brick|cobblestone|composite|fibre|metal|paving|stone|plastic|sett|stone|tile|timber|wood") + ~ "rough", + str_detect(tolower(surface), + "clay|compacted|dirt|earth|grass|gravel|mud|rock|sand|turf|unpacved|unpaved|unsealed") + ~ "unpaved", + TRUE ~ "smooth" + )) %>% + + # add impedance from surface level [ILLUSTRATIVE IMPEDANCES ONLY, TO BE + # REVISED BASED ON FURTHER RESEARCH OR SURVEY RESULTS] + mutate(surf_imped = case_when( + surf_lvl == "smooth" ~ 0, + surf_lvl == "unpaved" ~ length * 0.1, + surf_lvl == "rough" ~ length * 0.2 + )) + + return(list(nodes_current, edges_current)) +} + + +# investigation function to check number and type of surfaces +checkSurfTypes <- function(osmPbfExtract) { + + # osmPbfExtract = "./data/melbourne_australia.osm.pbf" + + echo("Reading in the .osm.pbf extract line layer\n") + lines <- oe_read(osmPbfExtract, layer = "lines", extra_tags = "surface") + + highways <- lines %>% + filter (highway %in% + c("motorway", "motorway_link", "trunk", "trunk_link", + "primary", "primary_link", "secondary", "secondary_link", + "tertiary", "tertiary_link", "residential", "road", "unclassified", + "living_street", "cycleway", "track", "service", + "pedestrian", "footway", "path", "corridor", "steps")) + + surface.totals <- highways %>% + st_drop_geometry() %>% + group_by(surface) %>% + summarise(n = n()) %>% + ungroup() + + # # print outputs as comma-separated string + # surface.totals.output <- "" + # for (i in 1:nrow(surface.totals)) { + # line <- paste0(surface.totals$surface[i], " (", surface.totals$n[i], ")") + # surface.totals.output <- paste0(surface.totals.output, line) + # if (i < nrow(surface.totals)) { + # surface.totals.output <- paste0(surface.totals.output, ", ") + # } + # } + # print(surface.totals.output) + + return(surface.totals) + +} + +# # using checkSurfTypes to compile table of surface types, no. 
and surf_lvl +# check <- checkSurfTypes("./data/melbourne_australia.osm.pbf") +# check.totals <- addSurfImped(NA, check %>% mutate(length = 0))[[2]] %>% +# dplyr::select(surface, n, surf_lvl) From ef4d1a8507a740bb374d11e5a7a0ec9000b73fc1 Mon Sep 17 00:00:00 2001 From: StevePem Date: Fri, 15 Sep 2023 14:40:41 +1000 Subject: [PATCH 016/103] comment out testing line --- functions/addElevation.R | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/functions/addElevation.R b/functions/addElevation.R index 46fb803..697b013 100644 --- a/functions/addElevation.R +++ b/functions/addElevation.R @@ -5,7 +5,7 @@ addElevation2Nodes <- function(nodes, rasterFile, multiplier=1){ } addElevation2Links <- function(network){ - network <- networkRestructured + # network <- networkRestructured nodes <- network[[1]] links <- network[[2]] From d1fc3e127f18a20a9b733b4487135d10a6c7e5b2 Mon Sep 17 00:00:00 2001 From: StevePem Date: Fri, 15 Sep 2023 14:44:23 +1000 Subject: [PATCH 017/103] fix function parameters --- functions/addDestinations.R | 5 +++-- functions/getDestinationTypes.R | 2 +- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/functions/addDestinations.R b/functions/addDestinations.R index a0abf38..5127af4 100644 --- a/functions/addDestinations.R +++ b/functions/addDestinations.R @@ -9,7 +9,7 @@ addDestinations <- function(nodes_current, edges_current, osmPbfExtract, - outputCRS) { + outputCrs) { # nodes_current = networkDensified[[1]] # edges_current = networkDensified[[2]] @@ -98,7 +98,8 @@ addDestinations <- function(nodes_current, destination.pt <- bind_rows(destination.layer(points), # add stations (from point, polygons and lines) to point table - getStation() %>% mutate(dest_type = "railway_station")) %>% + getStation(points, polygons, lines) %>% + mutate(dest_type = "railway_station")) %>% mutate(dest_id = row_number()) destination.poly <- diff --git a/functions/getDestinationTypes.R b/functions/getDestinationTypes.R index 9010e82..d7960a3 100644 --- a/functions/getDestinationTypes.R +++ b/functions/getDestinationTypes.R @@ -129,7 +129,7 @@ getCafe <- function(layer) { # Returns list of stations as points # Note the buffer distance of 100m below; closest railway stations in Melbourne are # Riversdale & Willison (about 420m) -getStation <- function() { +getStation <- function(points, polygons, lines) { # general filter to find station objects filterStation <- function(layer) { return(layer %>% From 2c4a953e49af83b2ddd8e013f2fd888939ef6c85 Mon Sep 17 00:00:00 2001 From: jafshin Date: Mon, 18 Sep 2023 17:44:10 +0200 Subject: [PATCH 018/103] there is an issue with invalid polygons, removing them for now, to be fixed --- functions/addDestinations.R | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/functions/addDestinations.R b/functions/addDestinations.R index 5127af4..eaa6a95 100644 --- a/functions/addDestinations.R +++ b/functions/addDestinations.R @@ -106,6 +106,17 @@ addDestinations <- function(nodes_current, destination.layer(polygons) %>% mutate(dest_id = max(destination.pt$dest_id) + row_number()) + # Some times polygons are not valid and have only two points. 
+ # For now I am removing those, this needs to be fixed + invalid.poly <- NULL + for (i in 1:nrow(destination.poly) ){ + if(sapply(st_geometry(destination.poly)[[i]], function(x) nrow(x[[1]])) < 3){ + invalid.poly <- cbind(invalid.poly, i) } + } + destination.poly <- destination.poly[-c(invalid.poly),] + + destination.poly <- destination.poly[- which(!st_is_valid(destination.poly$geometry)), ] + # # check numbers of each destination type # chk <- full_join(destination.poly %>% # st_drop_geometry() %>% From 95099c55cf88634d94bfebb3c45e92bc2c64e81e Mon Sep 17 00:00:00 2001 From: StevePem Date: Fri, 29 Sep 2023 16:52:17 +1000 Subject: [PATCH 019/103] improve tag key:value pair splitting and processing --- NetworkGenerator.R | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/NetworkGenerator.R b/NetworkGenerator.R index f8be832..25e2e28 100644 --- a/NetworkGenerator.R +++ b/NetworkGenerator.R @@ -4,7 +4,9 @@ makeNetwork<-function(outputFileName="test"){ # INPUT NETWORK # Set this to your desired cooridinate system for the network - outputCrs=28355 + outputCrs=28355 # Melbourne + # outputCrs=28356 # Brisbane + # A flag for whether process raw osm extract or not processOsm=F # If processOsm=T, Set this to your osm extract file name, e.g., melbourne.osm @@ -12,6 +14,7 @@ makeNetwork<-function(outputFileName="test"){ osmExtract='./data/melbourne.osm' # If procesOsm=F, set the following to the network sqlite file networkSqlite="./data/melbourne_network_unconfigured.sqlite" + # networkSqlite="./data/brisbane_network_unconfigured.sqlite" # SIMPLIFICATION shortLinkLength=20 @@ -42,7 +45,8 @@ makeNetwork<-function(outputFileName="test"){ addDestinationLayer=T # OSM extract for destinations, in .osm.pbf format osmPbfExtract="./data/melbourne_australia.osm.pbf" - + # osmPbfExtract="./data/Brisbane.osm.pbf" + # GTFS addGtfs=F gtfs_feed = "data/gtfs_au_vic_ptv_20191004.zip" # link to the GTFS .zip file From fead62bededf192f27bf31d4d4ccc640a4a4aa84 Mon Sep 17 00:00:00 2001 From: StevePem Date: Fri, 29 Sep 2023 16:53:16 +1000 Subject: [PATCH 020/103] commenting out invalid polygon issue for further review --- functions/addDestinations.R | 22 ++++++++++++++-------- 1 file changed, 14 insertions(+), 8 deletions(-) diff --git a/functions/addDestinations.R b/functions/addDestinations.R index eaa6a95..e3463a4 100644 --- a/functions/addDestinations.R +++ b/functions/addDestinations.R @@ -106,16 +106,22 @@ addDestinations <- function(nodes_current, destination.layer(polygons) %>% mutate(dest_id = max(destination.pt$dest_id) + row_number()) - # Some times polygons are not valid and have only two points. + # [AJ] Some times polygons are not valid and have only two points. # For now I am removing those, this needs to be fixed - invalid.poly <- NULL - for (i in 1:nrow(destination.poly) ){ - if(sapply(st_geometry(destination.poly)[[i]], function(x) nrow(x[[1]])) < 3){ - invalid.poly <- cbind(invalid.poly, i) } - } - destination.poly <- destination.poly[-c(invalid.poly),] + + # [SP to AJ] The 'invalid.poly' part of this throws errors for Brisbane for + # multipart geometries; commented out for now; while 2-point polygons + # are clearly wrongly digitised, do they matter? 
+
+  # invalid.poly <- NULL
+  # for (i in 1:nrow(destination.poly) ){
+  #   if(sapply(st_geometry(destination.poly)[[i]], function(x) nrow(x[[1]])) < 3){
+  #     invalid.poly <- cbind(invalid.poly, i) }
+  # }
+  # destination.poly <- destination.poly[-c(invalid.poly),]
 
-  destination.poly <- destination.poly[- which(!st_is_valid(destination.poly$geometry)), ]
+  destination.poly <- destination.poly[which(st_is_valid(destination.poly$geometry)), ]
 
   # # check numbers of each destination type
   # chk <- full_join(destination.poly %>%
   #                    st_drop_geometry() %>%
 
From 8d196cb95932a8bbbe9aa4456a53366bc2e81036 Mon Sep 17 00:00:00 2001
From: StevePem
Date: Fri, 29 Sep 2023 17:01:07 +1000
Subject: [PATCH 021/103] Revert "improve tag key:value pair splitting and
 processing"

This reverts commit 95099c55cf88634d94bfebb3c45e92bc2c64e81e.

---
 NetworkGenerator.R | 8 ++------
 1 file changed, 2 insertions(+), 6 deletions(-)

diff --git a/NetworkGenerator.R b/NetworkGenerator.R
index 25e2e28..f8be832 100644
--- a/NetworkGenerator.R
+++ b/NetworkGenerator.R
@@ -4,9 +4,7 @@ makeNetwork<-function(outputFileName="test"){
 
   # INPUT NETWORK
   # Set this to your desired cooridinate system for the network
-  outputCrs=28355 # Melbourne
-  # outputCrs=28356 # Brisbane
-
+  outputCrs=28355
   # A flag for whether process raw osm extract or not
   processOsm=F
   # If processOsm=T, Set this to your osm extract file name, e.g., melbourne.osm
@@ -14,7 +12,6 @@ makeNetwork<-function(outputFileName="test"){
   osmExtract='./data/melbourne.osm'
   # If procesOsm=F, set the following to the network sqlite file
   networkSqlite="./data/melbourne_network_unconfigured.sqlite"
-  # networkSqlite="./data/brisbane_network_unconfigured.sqlite"
 
   # SIMPLIFICATION
   shortLinkLength=20
@@ -45,8 +42,7 @@ makeNetwork<-function(outputFileName="test"){
   addDestinationLayer=T
   # OSM extract for destinations, in .osm.pbf format
   osmPbfExtract="./data/melbourne_australia.osm.pbf"
-  # osmPbfExtract="./data/Brisbane.osm.pbf"
-
+
   # GTFS
   addGtfs=F
   gtfs_feed = "data/gtfs_au_vic_ptv_20191004.zip" # link to the GTFS .zip file

From 6cfcb46b67a8ff33bb0412d06d6e484dadd24061 Mon Sep 17 00:00:00 2001
From: StevePem
Date: Fri, 29 Sep 2023 17:14:31 +1000
Subject: [PATCH 022/103] improve tag key:value pair splitting and processing

---
 functions/processOsmTags.R | 31 +++++++++++++++++++------------
 1 file changed, 19 insertions(+), 12 deletions(-)

diff --git a/functions/processOsmTags.R b/functions/processOsmTags.R
index a1797e4..c88ae5c 100644
--- a/functions/processOsmTags.R
+++ b/functions/processOsmTags.R
@@ -14,8 +14,11 @@ processOsmTags <- function(osm_df,this_defaults_df){
   # this_defaults_df <- defaults_df
 
   osmWithDefaults <- inner_join(osm_df,this_defaults_df,by="highway")
-  # pre splitting the tags to save time
-  tagList <- strsplit(gsub('=>',',', gsub('"', '', osmWithDefaults$other_tags)),',')
+
+  # pre splitting the tags to save time: replace the tag separator "," with the
+  # unique marker ',,'; replace the key:value separator "=>" with the same
+  # marker ',,'; strip the remaining quote marks; then split at the marker ',,'
+  tagList <- strsplit(gsub('"', '', gsub('"=>"', ',,', gsub('","', '",,"', osmWithDefaults$other_tags))), ',,')
 
   osmWithDefaults <- osmWithDefaults %>%
     mutate(cycleway=ifelse(highway=="cycleway",4,0)) %>%
@@ -27,21 +30,25 @@ processOsmTags <- function(osm_df,this_defaults_df){
 
     if (length(tags)>1) {
 
-      cycleway_tags <- tags[which(tags %like% "cycleway")+1]
+      # keys and values are odd and even-numbered tags respectively
+      keys <- tags[seq(1, length(tags), by = 2)]
+      values <- tags[seq(2, length(tags), by = 2)]
+
+      cycleway_tags <- values[which(keys %like% "cycleway")]
       if(any(is.na(cycleway_tags))) cycleway_tags <- c()
-      bicycle_tags <- tags[which(tags=="bicycle")+1]
+      bicycle_tags <- values[which(keys=="bicycle")]
       if(any(is.na(bicycle_tags))) bicycle_tags <- c()
-      car_tags <- tags[which(tags %in% c("car","motor_vehicle"))+1]
+      car_tags <- values[which(keys %in% c("car","motor_vehicle"))]
       if(any(is.na(car_tags))) car_tags <- c()
-      foot_tags <- tags[which(tags %like% "foot")+1]
+      foot_tags <- values[which(keys %like% "foot")]
       if(any(is.na(foot_tags))) foot_tags <- c()
-      surface_tags <- tags[which(tags=="surface")+1]
+      surface_tags <- values[which(keys=="surface")]
       if(any(is.na(surface_tags))) surface_tags <- c()
-      oneway_tags <- as.character(tags[which(tags=="oneway")+1])
+      oneway_tags <- as.character(values[which(keys=="oneway")])
       if(length(oneway_tags)==0) oneway_tags <- c()
 
-      if("maxspeed" %in% tags) {
-        maxSpeed=as.integer(tags[which(tags=="maxspeed")+1])
+      if("maxspeed" %in% keys) {
+        maxSpeed=as.integer(values[which(keys=="maxspeed")])
         # added this as some links had weird "masxspeed" values such as 500km/h!
         # 150km/h limit might cause issues for autobahns in Germany, AJ Jan 2021.
         if(!(is.na(maxSpeed)) & 140 < maxSpeed){
@@ -55,8 +62,8 @@ processOsmTags <- function(osm_df,this_defaults_df){
           df$freespeed[1]=freeSpeed
         }
       }
-      if("lanes" %in% tags) {
-        newLanes=as.integer(tags[which(tags=="lanes")+1])
+      if("lanes" %in% keys) {
+        newLanes=as.integer(values[which(keys=="lanes")])
         # some osm tags set the number of lanes to zero
         # added is.na since one of the lanes has a value of "2; 3"
         if(!is.na(newLanes) & newLanes > 0) {
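The gain from the two-stage marker above is that only the quoted separators '","' and '"=>"' become split points, so a value that itself contains a comma can no longer shift the key/value pairing, and the %like% tests now run over keys only rather than over the whole tag vector. A standalone sketch of the splitting, on an invented other_tags value (real values come straight from the OSM extract):

# same gsub chain as in processOsmTags.R above, applied to a single string
tags_str <- '"bicycle"=>"yes","maxspeed"=>"60","note"=>"wide, sealed path"'

tags <- strsplit(gsub('"', '', gsub('"=>"', ',,', gsub('","', '",,"', tags_str))), ',,')[[1]]
# "bicycle"  "yes"  "maxspeed"  "60"  "note"  "wide, sealed path"

keys   <- tags[seq(1, length(tags), by = 2)]   # "bicycle" "maxspeed" "note"
values <- tags[seq(2, length(tags), by = 2)]   # "yes" "60" "wide, sealed path"

# the old strsplit(gsub('=>',',', ...), ',') would have broken the last value
# at its internal comma, shifting every later key and value by one position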
From 66184c0ab8e27a812e1a5c0caa345a5d80ff9d46 Mon Sep 17 00:00:00 2001
From: StevePem
Date: Fri, 29 Sep 2023 21:24:41 +1000
Subject: [PATCH 023/103] add ndvi to network

---
 NetworkGenerator.R          | 17 ++++++++++++++++
 functions/addNDVI.R         | 39 +++++++++++++++++++++++++++++++++++++
 functions/addSlopeImped.R   |  2 +-
 functions/makeEdgesOneway.R |  3 +++
 functions/restructureData.R |  7 ++++---
 5 files changed, 64 insertions(+), 4 deletions(-)
 create mode 100644 functions/addNDVI.R

diff --git a/NetworkGenerator.R b/NetworkGenerator.R
index f8be832..7b121a6 100644
--- a/NetworkGenerator.R
+++ b/NetworkGenerator.R
@@ -42,6 +42,14 @@ makeNetwork<-function(outputFileName="test"){
   addDestinationLayer=T
   # OSM extract for destinations, in .osm.pbf format
   osmPbfExtract="./data/melbourne_australia.osm.pbf"
+
+  # NDVI
+  # A flag for whether to add NDVI or not
+  addNDVI=T
+  # NDVI file - make sure it is in the same coordinate system as your network
+  ndviFile="./data/NDVI_1600mBuffer_Melbourne_reprojected.tif"
+  # Buffer distance for finding average NDVI for links
+  ndviBuffDist=30
 
   # GTFS
   addGtfs=F
@@ -67,6 +75,7 @@ makeNetwork<-function(outputFileName="test"){
   library(stringr)
   library(igraph)
   library(raster)
+  library(terra)
   library(rgdal)
   library(purrr)
   library(lwgeom)
@@ -96,6 +105,7 @@ makeNetwork<-function(outputFileName="test"){
   echo(paste0("- Cropping to a test area: ", crop2Area,"\n"))
   echo(paste0("- Shortest link length in network simplification: ", shortLinkLength,"\n"))
   echo(paste0("- Adding elevation: ", addElevation,"\n"))
+  echo(paste0("- Adding NDVI: ", addNDVI,"\n"))
   echo(paste0("- Adding PT from GTFS: ", addGtfs,"\n"))
   echo(paste0("- Writing outputs in SQLite format: ", writeSqlite,"\n"))
   echo(paste0("- Writing outputs in ShapeFile format: ", writeShp,"\n"))
@@ -228,6 +238,13 @@ makeNetwork<-function(outputFileName="test"){
     networkDensified <- densifyNetwork(networkConnected,desnificationMaxLengh,
                                        densifyBikeways)
 
+  # Adding NDVI to links
+  if(addNDVI) {
+    system.time(networkDensified[[2]] <- addNDVI2Links(networkDensified[[2]],
+                                                       ndviFile,
+                                                       ndviBuffDist))
+  }
+
   # adding destinations layer
   if (addDestinationLayer) {
     destinations <- addDestinations(networkDensified[[1]],
diff --git a/functions/addNDVI.R b/functions/addNDVI.R
new file mode 100644
index 0000000..18a73dc
--- /dev/null
+++ b/functions/addNDVI.R
@@ -0,0 +1,39 @@
+# function to add NDVI to links, where NDVI is the average of the NDVI
+# values within a buffer (ndviBuffDist, typically 30m) of the links
+
+addNDVI2Links <- function(links, ndviFile, ndviBuffDist) {
+
+  # links = networkRestructured[[2]]
+  # ndviFile = "./data/NDVI_1600mBuffer_Melbourne_reprojected.tif"
+  # ndviBuffDist = 30
+
+  echo("Reading in the NDVI file\n")
+
+  # read in NDVI file
+  ndvi <- rast(ndviFile)
+
+  # buffer each link
+  links.buffered <- st_buffer(links, ndviBuffDist)
+
+  echo(paste0("Finding NDVI values within ", ndviBuffDist, "m of each link\n"))
+
+  # extract the NDVI values for the buffered links - produces a table with
+  # 2 columns, ID (which is the row number from links.buffered) and NDVI
+  ndvi_values <- terra::extract(ndvi, links.buffered)
+
+  echo("Finding mean of NDVI values for each link\n")
+
+  # find the mean of the values for each link
+  ndvi_values_mean <- ndvi_values %>%
+    group_by(ID) %>%
+    summarise(ndvi = mean(NDVI, na.rm = TRUE))
+
+  # join to the links, using the row number and ID
+  links.with.ndvi <- links %>%
+    mutate(row_no = row_number()) %>%
+    left_join(., ndvi_values_mean, by = c("row_no" = "ID")) %>%
+    dplyr::select(-row_no)
+
+  return(links.with.ndvi)
+
+}
\ No newline at end of file
diff --git a/functions/addSlopeImped.R b/functions/addSlopeImped.R
index c5ee42e..a700a7c 100644
--- a/functions/addSlopeImped.R
+++ b/functions/addSlopeImped.R
@@ -12,7 +12,7 @@ addSlopeImped <- function(nodes_current, edges_current) {
 
   edges_current <- edges_current %>%
 
-    # some coastal links are missing elevation; make slope 0
+    # some links (eg coastal) are missing elevation; make slope 0
     mutate(slope_pct = ifelse(is.na(slope_pct), 0, slope_pct)) %>%
 
     # 50m of length per 1m of climb
diff --git a/functions/makeEdgesOneway.R b/functions/makeEdgesOneway.R
index 66a2878..b6e4bb9 100644
--- a/functions/makeEdgesOneway.R
+++ b/functions/makeEdgesOneway.R
@@ -46,6 +46,9 @@ makeEdgesOneway <- function(nodes_current, edges_current) {
   if ("slope_pct" %in% colnames(edges_twoway_reversed)) {
     required_fields <- c(required_fields, "slope_pct")
   }
+  if ("ndvi" %in% colnames(edges_twoway_reversed)) {
+    required_fields <- c(required_fields, "ndvi")
+  }
 
   edges_twoway_reversed <- edges_twoway_reversed %>%
     dplyr::select(all_of(required_fields))
diff --git a/functions/restructureData.R b/functions/restructureData.R
index 824302f..26ec96e 100644
--- a/functions/restructureData.R
+++ b/functions/restructureData.R
@@ -65,9 +65,10 @@ restructureData <- function(networkList, highway_lookup,
     mutate(cycleway=ifelse(cycleway==2, "simple_lane"   , cycleway)) %>%
     mutate(cycleway=ifelse(cycleway==1, "shared_street" , cycleway)) %>%
     mutate(cycleway=ifelse(cycleway==0, NA              , cycleway)) %>%
-    dplyr::select(from_id, to_id, fromX, fromY, toX, toY, length, freespeed,
-                  permlanes, capacity, highway, is_oneway, cycleway, surface,
-                  is_cycle, is_walk, is_car, modes) %>%
+    dplyr::select(any_of(c("from_id", "to_id", "fromX", "fromY", "toX", "toY",
+                           "length", "freespeed", "permlanes", "capacity",
+                           "highway", "is_oneway", "cycleway", "surface",
+                           "is_cycle", "is_walk", "is_car", "modes", "ndvi"))) %>%
     mutate(id=NA) %>%
     relocate(id)
 
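For testing addNDVI2Links() on its own, outside the NetworkGenerator pipeline, a minimal sketch along these lines should work. It assumes the repo's helper functions (including echo()) have been sourced, that links is an sf line layer in the same coordinate system as the raster, and that the raster layer is named NDVI as the summarise step expects; the sqlite path and layer name here are hypothetical.

library(sf)
library(terra)
library(dplyr)

source("./functions/addNDVI.R")

# hypothetical input: any sf line layer in the raster's coordinate system
links <- st_read("./data/melbourne_network.sqlite", layer = "links")

links_ndvi <- addNDVI2Links(links,
                            ndviFile = "./data/NDVI_1600mBuffer_Melbourne_reprojected.tif",
                            ndviBuffDist = 30)

summary(links_ndvi$ndvi)   # mean NDVI within 30m of each link

Note that terra::extract() returns one row per raster cell per buffered link, which is why the group_by(ID)/summarise() step is needed to collapse the values back to a single mean per link.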
"capacity", + "highway", "is_oneway", "cycleway", "surface", + "is_cycle", "is_walk", "is_car", "modes", "ndvi"))) %>% mutate(id=NA) %>% relocate(id) From 001608d6e341d480fd51f23c27982766a3f5e807 Mon Sep 17 00:00:00 2001 From: StevePem Date: Sat, 30 Sep 2023 10:09:36 +1000 Subject: [PATCH 024/103] group city specific parameters together --- NetworkGenerator.R | 68 ++++++++++++++++++++++++++++++++-------------- 1 file changed, 48 insertions(+), 20 deletions(-) diff --git a/NetworkGenerator.R b/NetworkGenerator.R index 7b121a6..fb5e9bf 100644 --- a/NetworkGenerator.R +++ b/NetworkGenerator.R @@ -2,23 +2,58 @@ makeNetwork<-function(outputFileName="test"){ # outputFileName="network" # Parameters -------------------------------------------------------------- + # CITY AND ITS PARAMETERS + # Set city + city = "Melbourne" + # city = "Brisbane" + + # City parameters to be set + # • outputCrs: desired coordinate system for network + # • osmExtract: if 'processOsm=T', OSM extract file in .osm format (.osm.pbf + # not supported for this step) + # • networkSqlite: if 'processOsm=F', network sqlite file + # • cropAreaPoly: if 'crop2TestArea=T' cropArea location from + # https://github.com/JamesChevalier/cities/tree/master/australia/victoria + # (only supported for Victoria at this stage) + # • demFile: if 'addElevation=T', digital elevation model raster file (must be + # in same coordinate system as network) + # • osmPbfExtract: if 'addDestinationLayer=T', OSM extract for destinations, + # in .osm.pbf format + # • ndviFile: if 'addNDVI=T', raster file with NDVI values (must be in same + # coordinate system as network) + # • gtfs_feed: if 'addGtfs=T', zip file containing GTFS data (must also set + # start and end dates in GTFS section) + + if (city == "Melbourne") { + outputCrs = 28355 + osmExtract = "./data/melbourne.osm" + networkSqlite = "./data/melbourne_network_unconfigured.sqlite" + cropAreaPoly = "city-of-melbourne_victoria" + demFile = "./data/DEM_melbourne.tif" + osmPbfExtract = "./data/melbourne_australia.osm.pbf" + ndviFile = "./data/NDVI_1600mBuffer_Melbourne_reprojected.tif" + gtfs_feed = "data/gtfs_au_vic_ptv_20191004.zip" + + } else if (city == "Brisbane") { + outputCrs = 28356 + osmExtract = "" # must set 'processOsm=F' + networkSqlite = "./data/brisbane_network_unconfigured.sqlite" + cropAreaPoly = "" # must set 'crop2TestArea=F' + demFile = "./data/5m_DEM_reprojected.tif" # MIGHT NOT BE FINAL FILE + osmPbfExtract = "./data/brisbane_australia.osm.pbf" + ndviFile = "" # must set 'addNDVI=F' + gtfs_feed = "" # must set 'addGtfs=F' + + } + # INPUT NETWORK - # Set this to your desired cooridinate system for the network - outputCrs=28355 - # A flag for whether process raw osm extract or not + # A flag for whether process raw osm extract or not (if not, must have network sqlite) processOsm=F - # If processOsm=T, Set this to your osm extract file name, e.g., melbourne.osm - # Note that osm.pbf format is not yet supported - osmExtract='./data/melbourne.osm' - # If procesOsm=F, set the following to the network sqlite file - networkSqlite="./data/melbourne_network_unconfigured.sqlite" # SIMPLIFICATION shortLinkLength=20 minDangleLinkLengh=500 crop2Area=F - # If crop2TestArea=T, find your area from https://github.com/JamesChevalier/cities/tree/master/australia/victoria and set the following to its poly name - cropAreaPoly="city-of-melbourne_victoria" # DENSIFICATION desnificationMaxLengh=500 @@ -32,28 +67,20 @@ makeNetwork<-function(outputFileName="test"){ # ELEVATION # A flag for whether to add 
   addElevation=T
-  # Digital elevation model file - make sure it is in the same coordinate system as your network
-  demFile= "./data/DEM_melbourne.tif"
-  # DEM's multiplier- set to 1 if DEM contains actual elevation
   ElevationMultiplier=1
 
   # DESTINATIONS
   # A flag for whether to add a destinations layer (drawn from OSM) or not
   addDestinationLayer=T
-  # OSM extract for destinations, in .osm.pbf format
-  osmPbfExtract="./data/melbourne_australia.osm.pbf"
-
+
   # NDVI
   # A flag for whether to add NDVI or not
   addNDVI=T
-  # NDVI file - make sure it is in the same coordinate system as your network
-  ndviFile="./data/NDVI_1600mBuffer_Melbourne_reprojected.tif"
   # Buffer distance for finding average NDVI for links
   ndviBuffDist=30
 
   # GTFS
   addGtfs=F
-  gtfs_feed = "data/gtfs_au_vic_ptv_20191004.zip" # link to the GTFS .zip file
   analysis_start = as.Date("2019-10-11","%Y-%m-%d") # Transit Feed start date
   analysis_end = as.Date("2019-10-17","%Y-%m-%d") # Transit Feed end date
@@ -105,6 +132,7 @@ makeNetwork<-function(outputFileName="test"){
   echo(paste0("- Cropping to a test area: ", crop2Area,"\n"))
   echo(paste0("- Shortest link length in network simplification: ", shortLinkLength,"\n"))
   echo(paste0("- Adding elevation: ", addElevation,"\n"))
+  echo(paste0("- Adding destination layer: ", addDestinationLayer,"\n"))
   echo(paste0("- Adding NDVI: ", addNDVI,"\n"))
   echo(paste0("- Adding PT from GTFS: ", addGtfs,"\n"))
   echo(paste0("- Writing outputs in SQLite format: ", writeSqlite,"\n"))
   echo(paste0("- Writing outputs in ShapeFile format: ", writeShp,"\n"))
@@ -279,7 +307,7 @@ makeNetwork<-function(outputFileName="test"){
   }
 
   # Adding PT pseudo-network based on GTFS
-  # Adjust your analysis start date, end data and gtfs feed name below
+  # Adjust your analysis start date, end date and gtfs feed name above
   if(addGtfs) {
     # Adjust these parameters based on your GTFS file
     if(file.exists("data/studyRegion.sqlite")){

From 8e8c17c7168ceb8568390122c1e2ab3816598055 Mon Sep 17 00:00:00 2001
From: StevePem
Date: Thu, 12 Oct 2023 19:41:43 +1100
Subject: [PATCH 025/103] invalid polygon error detection

---
 functions/addDestinations.R | 16 +---------------
 1 file changed, 1 insertion(+), 15 deletions(-)

diff --git a/functions/addDestinations.R b/functions/addDestinations.R
index e3463a4..a5999bc 100644
--- a/functions/addDestinations.R
+++ b/functions/addDestinations.R
@@ -106,21 +106,7 @@ addDestinations <- function(nodes_current,
     destination.layer(polygons) %>%
     mutate(dest_id = max(destination.pt$dest_id) + row_number())
 
-  # [AJ] Some times polygons are not valid and have only two points.
-  # For now I am removing those, this needs to be fixed
-
-  # [SP to AJ] The 'invalid.poly' part of this throws errors for Brisbane for
-  # multipart geometries; commented out for now; while 2-point polygons
-  # are clearly wrongly digitised, do they matter?
- - # invalid.poly <- NULL - # for (i in 1:nrow(destination.poly) ){ - # if(sapply(st_geometry(destination.poly)[[i]], function(x) nrow(x[[1]])) < 3){ - # invalid.poly <- cbind(invalid.poly, i) } - # } - # destination.poly <- destination.poly[-c(invalid.poly),] - - # destination.poly <- destination.poly[- which(!st_is_valid(destination.poly$geometry)), ] + # Remove any invalid polygons as they may cause errors destination.poly <- destination.poly[which(st_is_valid(destination.poly$geometry)), ] # # check numbers of each destination type From e50a8fdd40c739f6af7f6ef50f1a20a792c8435e Mon Sep 17 00:00:00 2001 From: jafshin Date: Wed, 13 Sep 2023 18:14:03 +0200 Subject: [PATCH 026/103] adding osmconver to renv and updating packages --- renv.lock | 563 +++++++++++++++++++++++++++++++----------------- renv/activate.R | 453 +++++++++++++++++++++++++++----------- 2 files changed, 692 insertions(+), 324 deletions(-) diff --git a/renv.lock b/renv.lock index c4ec6e2..5b8047f 100644 --- a/renv.lock +++ b/renv.lock @@ -1,6 +1,6 @@ { "R": { - "Version": "4.2.1", + "Version": "4.1.2", "Repositories": [ { "Name": "CRAN", @@ -14,369 +14,448 @@ "Version": "1.1.3", "Source": "Repository", "Repository": "CRAN", - "Hash": "b2866e62bab9378c3cc9476a1954226b", - "Requirements": [] + "Requirements": [ + "R", + "methods" + ], + "Hash": "b2866e62bab9378c3cc9476a1954226b" }, "KernSmooth": { "Package": "KernSmooth", - "Version": "2.23-20", + "Version": "2.23-22", "Source": "Repository", "Repository": "CRAN", - "Hash": "8dcfa99b14c296bc9f1fd64d52fd3ce7", - "Requirements": [] + "Requirements": [ + "R", + "stats" + ], + "Hash": "2fecebc3047322fa5930f74fae5de70f" }, "MASS": { "Package": "MASS", - "Version": "7.3-58.2", + "Version": "7.3-60", "Source": "Repository", "Repository": "CRAN", - "Hash": "e02d1a0f6122fd3e634b25b433704344", - "Requirements": [] + "Requirements": [ + "R", + "grDevices", + "graphics", + "methods", + "stats", + "utils" + ], + "Hash": "a56a6365b3fa73293ea8d084be0d9bb0" }, "Matrix": { "Package": "Matrix", - "Version": "1.4-1", + "Version": "1.6-1", "Source": "Repository", "Repository": "CRAN", - "Hash": "699c47c606293bdfbc9fd78a93c9c8fe", "Requirements": [ - "lattice" - ] + "R", + "grDevices", + "graphics", + "grid", + "lattice", + "methods", + "stats", + "utils" + ], + "Hash": "cb6855ac711958ca734b75e631b2035d" }, "R6": { "Package": "R6", "Version": "2.5.1", "Source": "Repository", "Repository": "CRAN", - "Hash": "470851b6d5d0ac559e9d01bb352b4021", - "Requirements": [] + "Requirements": [ + "R" + ], + "Hash": "470851b6d5d0ac559e9d01bb352b4021" }, "Rcpp": { "Package": "Rcpp", - "Version": "1.0.10", + "Version": "1.0.11", "Source": "Repository", "Repository": "CRAN", - "Hash": "e749cae40fa9ef469b6050959517453c", - "Requirements": [] + "Requirements": [ + "methods", + "utils" + ], + "Hash": "ae6cbbe1492f4de79c45fce06f967ce8" }, "askpass": { "Package": "askpass", - "Version": "1.1", + "Version": "1.2.0", "Source": "Repository", "Repository": "CRAN", - "Hash": "e8a22846fff485f0be3770c2da758713", "Requirements": [ "sys" - ] - }, - "backports": { - "Package": "backports", - "Version": "1.4.1", - "Source": "Repository", - "Repository": "CRAN", - "Hash": "c39fbec8a30d23e721980b8afb31984c", - "Requirements": [] - }, - "checkmate": { - "Package": "checkmate", - "Version": "2.1.0", - "Source": "Repository", - "Repository": "CRAN", - "Hash": "147e4db6909d8814bb30f671b49d7e06", - "Requirements": [ - "backports" - ] + ], + "Hash": "cad6cf7f1d5f6e906700b9d3e718c796" }, "class": { "Package": "class", - "Version": 
"7.3-21", + "Version": "7.3-22", "Source": "Repository", "Repository": "CRAN", - "Hash": "8ae0d4328e2eb3a582dfd5391a3663b7", "Requirements": [ - "MASS" - ] + "MASS", + "R", + "stats", + "utils" + ], + "Hash": "f91f6b29f38b8c280f2b9477787d4bb2" }, "classInt": { "Package": "classInt", - "Version": "0.4-8", + "Version": "0.4-10", "Source": "Repository", "Repository": "CRAN", - "Hash": "298fa500d773db0845935cd73bfd9c2e", "Requirements": [ "KernSmooth", + "R", "class", - "e1071" - ] + "e1071", + "grDevices", + "graphics", + "stats" + ], + "Hash": "f5a40793b1ae463a7ffb3902a95bf864" }, "cli": { "Package": "cli", - "Version": "3.6.0", + "Version": "3.6.1", "Source": "Repository", "Repository": "CRAN", - "Hash": "3177a5a16c243adc199ba33117bd9657", - "Requirements": [] + "Requirements": [ + "R", + "utils" + ], + "Hash": "89e6d8219950eac806ae0c489052048a" }, "cpp11": { "Package": "cpp11", - "Version": "0.4.3", + "Version": "0.4.6", "Source": "Repository", "Repository": "CRAN", - "Hash": "ed588261931ee3be2c700d22e94a29ab", - "Requirements": [] + "Requirements": [ + "R" + ], + "Hash": "707fae4bbf73697ec8d85f9d7076c061" }, "curl": { "Package": "curl", - "Version": "5.0.0", + "Version": "5.0.2", "Source": "Repository", "Repository": "CRAN", - "Hash": "e4f97056611e8e6b8b852d13b7400cf1", - "Requirements": [] + "Requirements": [ + "R" + ], + "Hash": "511bacbfa153a15251166b463b4da4f9" }, "data.table": { "Package": "data.table", - "Version": "1.14.6", + "Version": "1.14.8", "Source": "Repository", "Repository": "CRAN", - "Hash": "aecef50008ea7b57c76f1cb5c127fb02", - "Requirements": [] + "Requirements": [ + "R", + "methods" + ], + "Hash": "b4c06e554f33344e044ccd7fdca750a9" }, "digest": { "Package": "digest", - "Version": "0.6.31", + "Version": "0.6.33", "Source": "Repository", "Repository": "CRAN", - "Hash": "8b708f296afd9ae69f450f9640be8990", - "Requirements": [] + "Requirements": [ + "R", + "utils" + ], + "Hash": "b18a9cf3c003977b0cc49d5e76ebe48d" }, "dplyr": { "Package": "dplyr", - "Version": "1.1.0", + "Version": "1.1.3", "Source": "Repository", "Repository": "CRAN", - "Hash": "d3c34618017e7ae252d46d79a1b9ec32", "Requirements": [ + "R", "R6", "cli", "generics", "glue", "lifecycle", "magrittr", + "methods", "pillar", "rlang", "tibble", "tidyselect", + "utils", "vctrs" - ] + ], + "Hash": "e85ffbebaad5f70e1a2e2ef4302b4949" }, "e1071": { "Package": "e1071", - "Version": "1.7-12", + "Version": "1.7-13", "Source": "Repository", "Repository": "CRAN", - "Hash": "0d776df7577206e100c2c4b508208b10", "Requirements": [ "class", - "proxy" - ] - }, - "ellipsis": { - "Package": "ellipsis", - "Version": "0.3.2", - "Source": "Repository", - "Repository": "CRAN", - "Hash": "bb0eec2fe32e88d9e2836c2f73ea2077", - "Requirements": [ - "rlang" - ] + "grDevices", + "graphics", + "methods", + "proxy", + "stats", + "utils" + ], + "Hash": "1046cb48d06cb40c2900d8878f03a0fe" }, "fansi": { "Package": "fansi", "Version": "1.0.4", "Source": "Repository", "Repository": "CRAN", - "Hash": "1d9e7ad3c8312a192dea7d3db0274fde", - "Requirements": [] + "Requirements": [ + "R", + "grDevices", + "utils" + ], + "Hash": "1d9e7ad3c8312a192dea7d3db0274fde" }, "fs": { "Package": "fs", - "Version": "1.6.0", + "Version": "1.6.3", "Source": "Repository", "Repository": "CRAN", - "Hash": "0120e8c933bace1141e0b0d376b0c010", - "Requirements": [] + "Requirements": [ + "R", + "methods" + ], + "Hash": "47b5f30c720c23999b913a1a635cf0bb" }, "generics": { "Package": "generics", "Version": "0.1.3", "Source": "Repository", "Repository": "CRAN", - "Hash": 
"15e9634c0fcd294799e9b2e929ed1b86", - "Requirements": [] + "Requirements": [ + "R", + "methods" + ], + "Hash": "15e9634c0fcd294799e9b2e929ed1b86" }, "geodist": { "Package": "geodist", "Version": "0.0.8", "Source": "Repository", "Repository": "CRAN", - "Hash": "be85dc7cec76dbd9a9478724ba518146", - "Requirements": [] + "Hash": "be85dc7cec76dbd9a9478724ba518146" }, "glue": { "Package": "glue", "Version": "1.6.2", "Source": "Repository", "Repository": "CRAN", - "Hash": "4f2596dfb05dac67b9dc558e5c6fba2e", - "Requirements": [] + "Requirements": [ + "R", + "methods" + ], + "Hash": "4f2596dfb05dac67b9dc558e5c6fba2e" }, "gtfsio": { "Package": "gtfsio", - "Version": "1.0.0", + "Version": "1.1.0", "Source": "Repository", "Repository": "CRAN", - "Hash": "dd7277616c17c88d4061d84f80c1d78a", "Requirements": [ "data.table", + "utils", "zip" - ] + ], + "Hash": "d02f8456a746098e0456cfd98db473c5" }, "hablar": { "Package": "hablar", - "Version": "0.3.1", + "Version": "0.3.2", "Source": "Repository", "Repository": "CRAN", - "Hash": "2330dfd884d745b504b21f72c84c5ac5", "Requirements": [ "dplyr", "lubridate", "purrr" - ] + ], + "Hash": "c7a6a49207405553fd26eff865d94360" }, "hms": { "Package": "hms", - "Version": "1.1.2", + "Version": "1.1.3", "Source": "Repository", "Repository": "CRAN", - "Hash": "41100392191e1244b887878b533eea91", "Requirements": [ - "ellipsis", "lifecycle", + "methods", "pkgconfig", "rlang", "vctrs" - ] + ], + "Hash": "b59377caa7ed00fa41808342002138f9" }, "httr": { "Package": "httr", - "Version": "1.4.4", + "Version": "1.4.7", "Source": "Repository", "Repository": "CRAN", - "Hash": "57557fac46471f0dbbf44705cc6a5c8c", "Requirements": [ + "R", "R6", "curl", "jsonlite", "mime", "openssl" - ] + ], + "Hash": "ac107251d9d9fd72f0ca8049988f1d7f" }, "igraph": { "Package": "igraph", - "Version": "1.3.5", + "Version": "1.5.1", "Source": "Repository", "Repository": "CRAN", - "Hash": "132b06d7060f11ba8b4c7e7f385e9b7a", "Requirements": [ "Matrix", + "R", + "cli", + "cpp11", + "grDevices", + "graphics", + "lifecycle", "magrittr", + "methods", "pkgconfig", - "rlang" - ] + "rlang", + "stats", + "utils" + ], + "Hash": "80401cb5ec513e8ddc56764d03f63669" }, "jsonlite": { "Package": "jsonlite", - "Version": "1.8.4", + "Version": "1.8.7", "Source": "Repository", "Repository": "CRAN", - "Hash": "a4269a09a9b865579b2635c77e572374", - "Requirements": [] + "Requirements": [ + "methods" + ], + "Hash": "266a20443ca13c65688b2116d5220f76" }, "lattice": { "Package": "lattice", - "Version": "0.20-45", + "Version": "0.21-8", "Source": "Repository", "Repository": "CRAN", - "Hash": "b64cdbb2b340437c4ee047a1f4c4377b", - "Requirements": [] + "Requirements": [ + "R", + "grDevices", + "graphics", + "grid", + "stats", + "utils" + ], + "Hash": "0b8a6d63c8770f02a8b5635f3c431e6b" }, "lifecycle": { "Package": "lifecycle", "Version": "1.0.3", "Source": "Repository", "Repository": "CRAN", - "Hash": "001cecbeac1cff9301bdc3775ee46a86", "Requirements": [ + "R", "cli", "glue", "rlang" - ] + ], + "Hash": "001cecbeac1cff9301bdc3775ee46a86" }, "lubridate": { "Package": "lubridate", - "Version": "1.9.1", + "Version": "1.9.2", "Source": "Repository", "Repository": "CRAN", - "Hash": "88ad585eb49669b7f2db3f5ef3c8307d", "Requirements": [ + "R", "generics", + "methods", "timechange" - ] + ], + "Hash": "e25f18436e3efd42c7c590a1c4c15390" }, "lwgeom": { "Package": "lwgeom", - "Version": "0.2-11", + "Version": "0.2-13", "Source": "Repository", "Repository": "CRAN", - "Hash": "14cf492ea07bf498fb34c80563ba593b", "Requirements": [ + "R", "Rcpp", "sf", "units" 
- ] + ], + "Hash": "9804362cc0267990ac61a85edeca73ed" }, "magrittr": { "Package": "magrittr", "Version": "2.0.3", "Source": "Repository", "Repository": "CRAN", - "Hash": "7ce2733a9826b3aeb1775d56fd305472", - "Requirements": [] + "Requirements": [ + "R" + ], + "Hash": "7ce2733a9826b3aeb1775d56fd305472" }, "mime": { "Package": "mime", "Version": "0.12", "Source": "Repository", "Repository": "CRAN", - "Hash": "18e9c28c1d3ca1560ce30658b22ce104", - "Requirements": [] + "Requirements": [ + "tools" + ], + "Hash": "18e9c28c1d3ca1560ce30658b22ce104" }, "openssl": { "Package": "openssl", - "Version": "2.0.5", + "Version": "2.1.0", "Source": "Repository", "Repository": "CRAN", - "Hash": "b04c27110bf367b4daa93f34f3d58e75", "Requirements": [ "askpass" - ] + ], + "Hash": "273a6bb4a9844c296a459d2176673270" + }, + "osmextract": { + "Package": "osmextract", + "Version": "0.5.0", + "Source": "Repository", + "Repository": "CRAN", + "Requirements": [ + "R", + "httr", + "jsonlite", + "sf", + "tools", + "utils" + ], + "Hash": "325765c4927551138994b1722d6094fc" }, "pillar": { "Package": "pillar", - "Version": "1.8.1", + "Version": "1.9.0", "Source": "Repository", "Repository": "CRAN", - "Hash": "f2316df30902c81729ae9de95ad5a608", "Requirements": [ "cli", "fansi", @@ -384,128 +463,171 @@ "lifecycle", "rlang", "utf8", + "utils", "vctrs" - ] + ], + "Hash": "15da5a8412f317beeee6175fbc76f4bb" }, "pkgconfig": { "Package": "pkgconfig", "Version": "2.0.3", "Source": "Repository", "Repository": "CRAN", - "Hash": "01f28d4278f15c76cddbea05899c5d6f", - "Requirements": [] + "Requirements": [ + "utils" + ], + "Hash": "01f28d4278f15c76cddbea05899c5d6f" }, "proxy": { "Package": "proxy", "Version": "0.4-27", "Source": "Repository", "Repository": "CRAN", - "Hash": "e0ef355c12942cf7a6b91a6cfaea8b3e", - "Requirements": [] + "Requirements": [ + "R", + "stats", + "utils" + ], + "Hash": "e0ef355c12942cf7a6b91a6cfaea8b3e" }, "purrr": { "Package": "purrr", - "Version": "1.0.1", + "Version": "1.0.2", "Source": "Repository", "Repository": "CRAN", - "Hash": "d71c815267c640f17ddbf7f16144b4bb", "Requirements": [ + "R", "cli", "lifecycle", "magrittr", "rlang", "vctrs" - ] + ], + "Hash": "1cba04a4e9414bdefc9dcaa99649a8dc" }, "raster": { "Package": "raster", - "Version": "3.6-14", + "Version": "3.6-23", "Source": "Repository", "Repository": "CRAN", - "Hash": "2406e2380237f4dcf84e83260419895a", "Requirements": [ + "R", "Rcpp", + "methods", "sp", "terra" - ] + ], + "Hash": "337d6d70f7d6bf78df236a5a53f09db0" }, "renv": { "Package": "renv", - "Version": "0.16.0", + "Version": "1.0.2", "Source": "Repository", "Repository": "CRAN", - "Hash": "c9e8442ab69bc21c9697ecf856c1e6c7", - "Requirements": [] + "Requirements": [ + "utils" + ], + "Hash": "4b22ac016fe54028b88d0c68badbd061" }, "rgdal": { "Package": "rgdal", - "Version": "1.6-4", + "Version": "1.6-7", "Source": "Repository", "Repository": "CRAN", - "Hash": "92183bff0bac3a711fde35a22c1bf45b", "Requirements": [ - "sp" - ] + "R", + "grDevices", + "graphics", + "methods", + "sp", + "stats", + "utils" + ], + "Hash": "10b777236c9e7855bc9dea8e347e30b7" }, "rlang": { "Package": "rlang", - "Version": "1.0.6", + "Version": "1.1.1", "Source": "Repository", "Repository": "CRAN", - "Hash": "4ed1f8336c8d52c3e750adcdc57228a7", - "Requirements": [] + "Requirements": [ + "R", + "utils" + ], + "Hash": "a85c767b55f0bf9b7ad16c6d7baee5bb" }, "s2": { "Package": "s2", - "Version": "1.1.2", + "Version": "1.1.4", "Source": "Repository", "Repository": "CRAN", - "Hash": "5cca323babe990f99d5bc3402f64b905", "Requirements": [ + 
"R", "Rcpp", "wk" - ] + ], + "Hash": "f1cbe03bb3346f8e817518ffa20f9f5a" }, "sf": { "Package": "sf", - "Version": "1.0-9", + "Version": "1.0-14", "Source": "Repository", "Repository": "CRAN", - "Hash": "8a348490fefb623e7935f405230900a7", "Requirements": [ "DBI", + "R", "Rcpp", "classInt", + "grDevices", + "graphics", + "grid", "magrittr", + "methods", "s2", - "units" - ] + "stats", + "tools", + "units", + "utils" + ], + "Hash": "e2111252a76984ca50bf8d6314348681" }, "sp": { "Package": "sp", - "Version": "1.6-0", + "Version": "2.0-0", "Source": "Repository", "Repository": "CRAN", - "Hash": "6674e075a078d9c3bde8ba800367347c", "Requirements": [ - "lattice" - ] + "R", + "grDevices", + "graphics", + "grid", + "lattice", + "methods", + "stats", + "utils" + ], + "Hash": "2551981e6f85d59c81652bf654d6c3ca" }, "stringi": { "Package": "stringi", "Version": "1.7.12", "Source": "Repository", "Repository": "CRAN", - "Hash": "ca8bd84263c77310739d2cf64d84d7c9", - "Requirements": [] + "Requirements": [ + "R", + "stats", + "tools", + "utils" + ], + "Hash": "ca8bd84263c77310739d2cf64d84d7c9" }, "stringr": { "Package": "stringr", "Version": "1.5.0", "Source": "Repository", "Repository": "CRAN", - "Hash": "671a4d384ae9d32fc47a14e98bfa3dc8", "Requirements": [ + "R", "cli", "glue", "lifecycle", @@ -513,140 +635,179 @@ "rlang", "stringi", "vctrs" - ] + ], + "Hash": "671a4d384ae9d32fc47a14e98bfa3dc8" }, "sys": { "Package": "sys", - "Version": "3.4.1", + "Version": "3.4.2", "Source": "Repository", "Repository": "CRAN", - "Hash": "34c16f1ef796057bfa06d3f4ff818a5d", - "Requirements": [] + "Hash": "3a1be13d68d47a8cd0bfd74739ca1555" }, "terra": { "Package": "terra", - "Version": "1.7-3", + "Version": "1.7-46", "Source": "Repository", "Repository": "CRAN", - "Hash": "230c4bfbb6aae92d92ef9762b683541e", "Requirements": [ - "Rcpp" - ] + "R", + "Rcpp", + "methods" + ], + "Hash": "d15182a8a20ffc5880e721bfa1bf4ce9" }, "tibble": { "Package": "tibble", - "Version": "3.1.8", + "Version": "3.2.1", "Source": "Repository", "Repository": "CRAN", - "Hash": "56b6934ef0f8c68225949a8672fe1a8f", "Requirements": [ + "R", "fansi", "lifecycle", "magrittr", + "methods", "pillar", "pkgconfig", "rlang", + "utils", "vctrs" - ] + ], + "Hash": "a84e2cc86d07289b3b6f5069df7a004c" + }, + "tidyr": { + "Package": "tidyr", + "Version": "1.3.0", + "Source": "Repository", + "Repository": "CRAN", + "Requirements": [ + "R", + "cli", + "cpp11", + "dplyr", + "glue", + "lifecycle", + "magrittr", + "purrr", + "rlang", + "stringr", + "tibble", + "tidyselect", + "utils", + "vctrs" + ], + "Hash": "e47debdc7ce599b070c8e78e8ac0cfcf" }, "tidyselect": { "Package": "tidyselect", "Version": "1.2.0", "Source": "Repository", "Repository": "CRAN", - "Hash": "79540e5fcd9e0435af547d885f184fd5", "Requirements": [ + "R", "cli", "glue", "lifecycle", "rlang", "vctrs", "withr" - ] + ], + "Hash": "79540e5fcd9e0435af547d885f184fd5" }, "tidytransit": { "Package": "tidytransit", - "Version": "1.4", + "Version": "1.6.0", "Source": "Repository", "Repository": "CRAN", - "Hash": "9e685f7c56c2e2c2f92438f4ac6b1644", "Requirements": [ - "checkmate", + "R", "data.table", "digest", "dplyr", "geodist", "gtfsio", "hms", - "httr", "rlang", "sf" - ] + ], + "Hash": "a9255650349cc3438c03020e509c1074" }, "timechange": { "Package": "timechange", "Version": "0.2.0", "Source": "Repository", "Repository": "CRAN", - "Hash": "8548b44f79a35ba1791308b61e6012d7", "Requirements": [ + "R", "cpp11" - ] + ], + "Hash": "8548b44f79a35ba1791308b61e6012d7" }, "units": { "Package": "units", - "Version": "0.8-1", + 
"Version": "0.8-3", "Source": "Repository", "Repository": "CRAN", - "Hash": "81433967f9b34a120a4f5a5a016cd5ed", "Requirements": [ + "R", "Rcpp" - ] + ], + "Hash": "880ebc99e4d8f7e5f3caeb2f12632583" }, "utf8": { "Package": "utf8", "Version": "1.2.3", "Source": "Repository", "Repository": "CRAN", - "Hash": "1fe17157424bb09c48a8b3b550c753bc", - "Requirements": [] + "Requirements": [ + "R" + ], + "Hash": "1fe17157424bb09c48a8b3b550c753bc" }, "vctrs": { "Package": "vctrs", - "Version": "0.5.2", + "Version": "0.6.3", "Source": "Repository", "Repository": "CRAN", - "Hash": "e4ffa94ceed5f124d429a5a5f0f5b378", "Requirements": [ + "R", "cli", "glue", "lifecycle", "rlang" - ] + ], + "Hash": "d0ef2856b83dc33ea6e255caf6229ee2" }, "withr": { "Package": "withr", "Version": "2.5.0", "Source": "Repository", "Repository": "CRAN", - "Hash": "c0e49a9760983e81e55cdd9be92e7182", - "Requirements": [] + "Requirements": [ + "R", + "grDevices", + "graphics", + "stats" + ], + "Hash": "c0e49a9760983e81e55cdd9be92e7182" }, "wk": { "Package": "wk", - "Version": "0.7.1", + "Version": "0.8.0", "Source": "Repository", "Repository": "CRAN", - "Hash": "141385279f2cd7faa6a3eccd8d1279dd", - "Requirements": [] + "Requirements": [ + "R" + ], + "Hash": "aaf7e20556e3125a09d53453814ad339" }, "zip": { "Package": "zip", - "Version": "2.2.2", + "Version": "2.3.0", "Source": "Repository", "Repository": "CRAN", - "Hash": "c42bfcec3fa6a0cce17ce1f8bc684f88", - "Requirements": [] + "Hash": "d98c94dacb7e0efcf83b0a133a705504" } } } diff --git a/renv/activate.R b/renv/activate.R index 019b5a6..2969c73 100644 --- a/renv/activate.R +++ b/renv/activate.R @@ -2,11 +2,27 @@ local({ # the requested version of renv - version <- "0.16.0" + version <- "1.0.2" + attr(version, "sha") <- NULL # the project directory project <- getwd() + # use start-up diagnostics if enabled + diagnostics <- Sys.getenv("RENV_STARTUP_DIAGNOSTICS", unset = "FALSE") + if (diagnostics) { + start <- Sys.time() + profile <- tempfile("renv-startup-", fileext = ".Rprof") + utils::Rprof(profile) + on.exit({ + utils::Rprof(NULL) + elapsed <- signif(difftime(Sys.time(), start, units = "auto"), digits = 2L) + writeLines(sprintf("- renv took %s to run the autoloader.", format(elapsed))) + writeLines(sprintf("- Profile: %s", profile)) + print(utils::summaryRprof(profile)) + }, add = TRUE) + } + # figure out whether the autoloader is enabled enabled <- local({ @@ -60,21 +76,75 @@ local({ # load bootstrap tools `%||%` <- function(x, y) { - if (is.environment(x) || length(x)) x else y + if (is.null(x)) y else x + } + + catf <- function(fmt, ..., appendLF = TRUE) { + + quiet <- getOption("renv.bootstrap.quiet", default = FALSE) + if (quiet) + return(invisible()) + + msg <- sprintf(fmt, ...) + cat(msg, file = stdout(), sep = if (appendLF) "\n" else "") + + invisible(msg) + + } + + header <- function(label, + ..., + prefix = "#", + suffix = "-", + n = min(getOption("width"), 78)) + { + label <- sprintf(label, ...) 
+ n <- max(n - nchar(label) - nchar(prefix) - 2L, 8L) + if (n <= 0) + return(paste(prefix, label)) + + tail <- paste(rep.int(suffix, n), collapse = "") + paste0(prefix, " ", label, " ", tail) + + } + + startswith <- function(string, prefix) { + substring(string, 1, nchar(prefix)) == prefix } bootstrap <- function(version, library) { + friendly <- renv_bootstrap_version_friendly(version) + section <- header(sprintf("Bootstrapping renv %s", friendly)) + catf(section) + # attempt to download renv - tarball <- tryCatch(renv_bootstrap_download(version), error = identity) - if (inherits(tarball, "error")) - stop("failed to download renv ", version) + catf("- Downloading renv ... ", appendLF = FALSE) + withCallingHandlers( + tarball <- renv_bootstrap_download(version), + error = function(err) { + catf("FAILED") + stop("failed to download:\n", conditionMessage(err)) + } + ) + catf("OK") + on.exit(unlink(tarball), add = TRUE) # now attempt to install - status <- tryCatch(renv_bootstrap_install(version, tarball, library), error = identity) - if (inherits(status, "error")) - stop("failed to install renv ", version) + catf("- Installing renv ... ", appendLF = FALSE) + withCallingHandlers( + status <- renv_bootstrap_install(version, tarball, library), + error = function(err) { + catf("FAILED") + stop("failed to install:\n", conditionMessage(err)) + } + ) + catf("OK") + + # add empty line to break up bootstrapping from normal output + catf("") + return(invisible()) } renv_bootstrap_tests_running <- function() { @@ -83,28 +153,32 @@ local({ renv_bootstrap_repos <- function() { + # get CRAN repository + cran <- getOption("renv.repos.cran", "https://cloud.r-project.org") + # check for repos override repos <- Sys.getenv("RENV_CONFIG_REPOS_OVERRIDE", unset = NA) - if (!is.na(repos)) + if (!is.na(repos)) { + + # check for RSPM; if set, use a fallback repository for renv + rspm <- Sys.getenv("RSPM", unset = NA) + if (identical(rspm, repos)) + repos <- c(RSPM = rspm, CRAN = cran) + return(repos) + } + # check for lockfile repositories repos <- tryCatch(renv_bootstrap_repos_lockfile(), error = identity) if (!inherits(repos, "error") && length(repos)) return(repos) - # if we're testing, re-use the test repositories - if (renv_bootstrap_tests_running()) - return(getOption("renv.tests.repos")) - # retrieve current repos repos <- getOption("repos") # ensure @CRAN@ entries are resolved - repos[repos == "@CRAN@"] <- getOption( - "renv.repos.cran", - "https://cloud.r-project.org" - ) + repos[repos == "@CRAN@"] <- cran # add in renv.bootstrap.repos if set default <- c(FALLBACK = "https://cloud.r-project.org") @@ -143,33 +217,34 @@ local({ renv_bootstrap_download <- function(version) { - # if the renv version number has 4 components, assume it must - # be retrieved via github - nv <- numeric_version(version) - components <- unclass(nv)[[1]] - - # if this appears to be a development version of 'renv', we'll - # try to restore from github - dev <- length(components) == 4L - - # begin collecting different methods for finding renv - methods <- c( - renv_bootstrap_download_tarball, - if (dev) - renv_bootstrap_download_github - else c( - renv_bootstrap_download_cran_latest, - renv_bootstrap_download_cran_archive + sha <- attr(version, "sha", exact = TRUE) + + methods <- if (!is.null(sha)) { + + # attempting to bootstrap a development version of renv + c( + function() renv_bootstrap_download_tarball(sha), + function() renv_bootstrap_download_github(sha) ) - ) + + } else { + + # attempting to bootstrap a release version of renv 
+ c( + function() renv_bootstrap_download_tarball(version), + function() renv_bootstrap_download_cran_latest(version), + function() renv_bootstrap_download_cran_archive(version) + ) + + } for (method in methods) { - path <- tryCatch(method(version), error = identity) + path <- tryCatch(method(), error = identity) if (is.character(path) && file.exists(path)) return(path) } - stop("failed to download renv ", version) + stop("All download methods failed") } @@ -233,8 +308,6 @@ local({ type <- spec$type repos <- spec$repos - message("* Downloading renv ", version, " ... ", appendLF = FALSE) - baseurl <- utils::contrib.url(repos = repos, type = type) ext <- if (identical(type, "source")) ".tar.gz" @@ -251,13 +324,10 @@ local({ condition = identity ) - if (inherits(status, "condition")) { - message("FAILED") + if (inherits(status, "condition")) return(FALSE) - } # report success and return - message("OK (downloaded ", type, ")") destfile } @@ -314,8 +384,6 @@ local({ urls <- file.path(repos, "src/contrib/Archive/renv", name) destfile <- file.path(tempdir(), name) - message("* Downloading renv ", version, " ... ", appendLF = FALSE) - for (url in urls) { status <- tryCatch( @@ -323,14 +391,11 @@ local({ condition = identity ) - if (identical(status, 0L)) { - message("OK") + if (identical(status, 0L)) return(destfile) - } } - message("FAILED") return(FALSE) } @@ -344,8 +409,7 @@ local({ return() # allow directories - info <- file.info(tarball, extra_cols = FALSE) - if (identical(info$isdir, TRUE)) { + if (dir.exists(tarball)) { name <- sprintf("renv_%s.tar.gz", version) tarball <- file.path(tarball, name) } @@ -354,7 +418,7 @@ local({ if (!file.exists(tarball)) { # let the user know we weren't able to honour their request - fmt <- "* RENV_BOOTSTRAP_TARBALL is set (%s) but does not exist." + fmt <- "- RENV_BOOTSTRAP_TARBALL is set (%s) but does not exist." msg <- sprintf(fmt, tarball) warning(msg) @@ -363,10 +427,7 @@ local({ } - fmt <- "* Bootstrapping with tarball at path '%s'." - msg <- sprintf(fmt, tarball) - message(msg) - + catf("- Using local tarball '%s'.", tarball) tarball } @@ -393,8 +454,6 @@ local({ on.exit(do.call(base::options, saved), add = TRUE) } - message("* Downloading renv ", version, " from GitHub ... ", appendLF = FALSE) - url <- file.path("https://api.github.com/repos/rstudio/renv/tarball", version) name <- sprintf("renv_%s.tar.gz", version) destfile <- file.path(tempdir(), name) @@ -404,26 +463,105 @@ local({ condition = identity ) - if (!identical(status, 0L)) { - message("FAILED") + if (!identical(status, 0L)) return(FALSE) - } - message("OK") + renv_bootstrap_download_augment(destfile) + return(destfile) } + # Add Sha to DESCRIPTION. This is stop gap until #890, after which we + # can use renv::install() to fully capture metadata. 
+ renv_bootstrap_download_augment <- function(destfile) { + sha <- renv_bootstrap_git_extract_sha1_tar(destfile) + if (is.null(sha)) { + return() + } + + # Untar + tempdir <- tempfile("renv-github-") + on.exit(unlink(tempdir, recursive = TRUE), add = TRUE) + untar(destfile, exdir = tempdir) + pkgdir <- dir(tempdir, full.names = TRUE)[[1]] + + # Modify description + desc_path <- file.path(pkgdir, "DESCRIPTION") + desc_lines <- readLines(desc_path) + remotes_fields <- c( + "RemoteType: github", + "RemoteHost: api.github.com", + "RemoteRepo: renv", + "RemoteUsername: rstudio", + "RemotePkgRef: rstudio/renv", + paste("RemoteRef: ", sha), + paste("RemoteSha: ", sha) + ) + writeLines(c(desc_lines[desc_lines != ""], remotes_fields), con = desc_path) + + # Re-tar + local({ + old <- setwd(tempdir) + on.exit(setwd(old), add = TRUE) + + tar(destfile, compression = "gzip") + }) + invisible() + } + + # Extract the commit hash from a git archive. Git archives include the SHA1 + # hash as the comment field of the tarball pax extended header + # (see https://www.kernel.org/pub/software/scm/git/docs/git-archive.html) + # For GitHub archives this should be the first header after the default one + # (512 byte) header. + renv_bootstrap_git_extract_sha1_tar <- function(bundle) { + + # open the bundle for reading + # We use gzcon for everything because (from ?gzcon) + # > Reading from a connection which does not supply a 'gzip' magic + # > header is equivalent to reading from the original connection + conn <- gzcon(file(bundle, open = "rb", raw = TRUE)) + on.exit(close(conn)) + + # The default pax header is 512 bytes long and the first pax extended header + # with the comment should be 51 bytes long + # `52 comment=` (11 chars) + 40 byte SHA1 hash + len <- 0x200 + 0x33 + res <- rawToChar(readBin(conn, "raw", n = len)[0x201:len]) + + if (grepl("^52 comment=", res)) { + sub("52 comment=", "", res) + } else { + NULL + } + } + renv_bootstrap_install <- function(version, tarball, library) { # attempt to install it into project library - message("* Installing renv ", version, " ... 
", appendLF = FALSE) dir.create(library, showWarnings = FALSE, recursive = TRUE) + output <- renv_bootstrap_install_impl(library, tarball) + + # check for successful install + status <- attr(output, "status") + if (is.null(status) || identical(status, 0L)) + return(status) + + # an error occurred; report it + header <- "installation of renv failed" + lines <- paste(rep.int("=", nchar(header)), collapse = "") + text <- paste(c(header, lines, output), collapse = "\n") + stop(text) + + } + + renv_bootstrap_install_impl <- function(library, tarball) { # invoke using system2 so we can capture and report output bin <- R.home("bin") exe <- if (Sys.info()[["sysname"]] == "Windows") "R.exe" else "R" - r <- file.path(bin, exe) + R <- file.path(bin, exe) args <- c( "--vanilla", "CMD", "INSTALL", "--no-multiarch", @@ -431,19 +569,7 @@ local({ shQuote(path.expand(tarball)) ) - output <- system2(r, args, stdout = TRUE, stderr = TRUE) - message("Done!") - - # check for successful install - status <- attr(output, "status") - if (is.numeric(status) && !identical(status, 0L)) { - header <- "Error installing renv:" - lines <- paste(rep.int("=", nchar(header)), collapse = "") - text <- c(header, lines, output) - writeLines(text, con = stderr()) - } - - status + system2(R, args, stdout = TRUE, stderr = TRUE) } @@ -653,34 +779,62 @@ local({ } - renv_bootstrap_validate_version <- function(version) { + renv_bootstrap_validate_version <- function(version, description = NULL) { - loadedversion <- utils::packageDescription("renv", fields = "Version") - if (version == loadedversion) - return(TRUE) + # resolve description file + # + # avoid passing lib.loc to `packageDescription()` below, since R will + # use the loaded version of the package by default anyhow. note that + # this function should only be called after 'renv' is loaded + # https://github.com/rstudio/renv/issues/1625 + description <- description %||% packageDescription("renv") - # assume four-component versions are from GitHub; three-component - # versions are from CRAN - components <- strsplit(loadedversion, "[.-]")[[1]] - remote <- if (length(components) == 4L) - paste("rstudio/renv", loadedversion, sep = "@") + # check whether requested version 'version' matches loaded version of renv + sha <- attr(version, "sha", exact = TRUE) + valid <- if (!is.null(sha)) + renv_bootstrap_validate_version_dev(sha, description) else - paste("renv", loadedversion, sep = "@") + renv_bootstrap_validate_version_release(version, description) + + if (valid) + return(TRUE) + + # the loaded version of renv doesn't match the requested version; + # give the user instructions on how to proceed + remote <- if (!is.null(description[["RemoteSha"]])) { + paste("rstudio/renv", description[["RemoteSha"]], sep = "@") + } else { + paste("renv", description[["Version"]], sep = "@") + } + + # display both loaded version + sha if available + friendly <- renv_bootstrap_version_friendly( + version = description[["Version"]], + sha = description[["RemoteSha"]] + ) fmt <- paste( "renv %1$s was loaded from project library, but this project is configured to use renv %2$s.", - "Use `renv::record(\"%3$s\")` to record renv %1$s in the lockfile.", - "Use `renv::restore(packages = \"renv\")` to install renv %2$s into the project library.", + "- Use `renv::record(\"%3$s\")` to record renv %1$s in the lockfile.", + "- Use `renv::restore(packages = \"renv\")` to install renv %2$s into the project library.", sep = "\n" ) - - msg <- sprintf(fmt, loadedversion, version, remote) - warning(msg, call. 
= FALSE) + catf(fmt, friendly, renv_bootstrap_version_friendly(version), remote) FALSE } + renv_bootstrap_validate_version_dev <- function(version, description) { + expected <- description[["RemoteSha"]] + is.character(expected) && startswith(expected, version) + } + + renv_bootstrap_validate_version_release <- function(version, description) { + expected <- description[["Version"]] + is.character(expected) && identical(expected, version) + } + renv_bootstrap_hash_text <- function(text) { hashfile <- tempfile("renv-hash-") @@ -700,6 +854,12 @@ local({ # warn if the version of renv loaded does not match renv_bootstrap_validate_version(version) + # execute renv load hooks, if any + hooks <- getHook("renv::autoload") + for (hook in hooks) + if (is.function(hook)) + tryCatch(hook(), error = warnify) + # load the project renv::load(project) @@ -839,14 +999,79 @@ local({ } + renv_bootstrap_version_friendly <- function(version, shafmt = NULL, sha = NULL) { + sha <- sha %||% attr(version, "sha", exact = TRUE) + parts <- c(version, sprintf(shafmt %||% " [sha: %s]", substring(sha, 1L, 7L))) + paste(parts, collapse = "") + } + + renv_bootstrap_exec <- function(project, libpath, version) { + if (!renv_bootstrap_load(project, libpath, version)) + renv_bootstrap_run(version, libpath) + } + + renv_bootstrap_run <- function(version, libpath) { + + # perform bootstrap + bootstrap(version, libpath) + + # exit early if we're just testing bootstrap + if (!is.na(Sys.getenv("RENV_BOOTSTRAP_INSTALL_ONLY", unset = NA))) + return(TRUE) + + # try again to load + if (requireNamespace("renv", lib.loc = libpath, quietly = TRUE)) { + return(renv::load(project = getwd())) + } + + # failed to download or load renv; warn the user + msg <- c( + "Failed to find an renv installation: the project will not be loaded.", + "Use `renv::activate()` to re-initialize the project." + ) + + warning(paste(msg, collapse = "\n"), call. 
= FALSE) + + } + + + renv_bootstrap_in_rstudio <- function() { + commandArgs()[[1]] == "RStudio" + } + + # Used to work around buglet in RStudio if hook uses readline + renv_bootstrap_flush_console <- function() { + tryCatch({ + tools <- as.environment("tools:rstudio") + tools$.rs.api.sendToConsole("", echo = FALSE, focus = FALSE) + }, error = function(cnd) {}) + } renv_json_read <- function(file = NULL, text = NULL) { + jlerr <- NULL + # if jsonlite is loaded, use that instead - if ("jsonlite" %in% loadedNamespaces()) - renv_json_read_jsonlite(file, text) + if ("jsonlite" %in% loadedNamespaces()) { + + json <- catch(renv_json_read_jsonlite(file, text)) + if (!inherits(json, "error")) + return(json) + + jlerr <- json + + } + + # otherwise, fall back to the default JSON reader + json <- catch(renv_json_read_default(file, text)) + if (!inherits(json, "error")) + return(json) + + # report an error + if (!is.null(jlerr)) + stop(jlerr) else - renv_json_read_default(file, text) + stop(json) } @@ -960,35 +1185,17 @@ local({ # construct full libpath libpath <- file.path(root, prefix) - # attempt to load - if (renv_bootstrap_load(project, libpath, version)) - return(TRUE) - - # load failed; inform user we're about to bootstrap - prefix <- paste("# Bootstrapping renv", version) - postfix <- paste(rep.int("-", 77L - nchar(prefix)), collapse = "") - header <- paste(prefix, postfix) - message(header) - - # perform bootstrap - bootstrap(version, libpath) - - # exit early if we're just testing bootstrap - if (!is.na(Sys.getenv("RENV_BOOTSTRAP_INSTALL_ONLY", unset = NA))) - return(TRUE) - - # try again to load - if (requireNamespace("renv", lib.loc = libpath, quietly = TRUE)) { - message("* Successfully installed and loaded renv ", version, ".") - return(renv::load()) + if (renv_bootstrap_in_rstudio()) { + # RStudio only updates console once .Rprofile is finished, so + # instead run code on sessionInit + setHook("rstudio.sessionInit", function(...) { + renv_bootstrap_exec(project, libpath, version) + renv_bootstrap_flush_console() + }) + } else { + renv_bootstrap_exec(project, libpath, version) } - # failed to download or load renv; warn the user - msg <- c( - "Failed to find an renv installation: the project will not be loaded.", - "Use `renv::activate()` to re-initialize the project." - ) - - warning(paste(msg, collapse = "\n"), call. 
= FALSE) + invisible() }) From fd11677b5cb39b9023d4151b78f5b6a6ae150586 Mon Sep 17 00:00:00 2001 From: jafshin Date: Thu, 14 Sep 2023 13:14:18 +0200 Subject: [PATCH 027/103] should not remove links with parking info - might be on street parking --- processOSM.sh | 2 -- 1 file changed, 2 deletions(-) diff --git a/processOSM.sh b/processOSM.sh index 083854b..b5f0f4c 100755 --- a/processOSM.sh +++ b/processOSM.sh @@ -30,7 +30,6 @@ ogr2ogr -update -overwrite -nln roads -f "SQLite" -dsco SPATIALITE=YES \ (other_tags IS NULL OR (other_tags NOT LIKE '%busbar%' AND \ other_tags NOT LIKE '%abandoned%' AND \ - other_tags NOT LIKE '%parking%' AND \ other_tags NOT LIKE '%\"access\"=>\"private\"%')) " \ ./data/temp.sqlite $extract # highway NOT LIKE '%service%' AND \ @@ -56,7 +55,6 @@ ogr2ogr -update -overwrite -nln pt -f "SQLite" -dialect SQLite -sql \ WHERE other_tags LIKE '%railway%' AND \ other_tags NOT LIKE '%busbar%' AND \ other_tags NOT LIKE '%abandoned%' AND \ - other_tags NOT LIKE '%parking%' AND \ other_tags NOT LIKE '%miniature%' AND \ other_tags NOT LIKE '%proposed%' AND \ other_tags NOT LIKE '%disused%' AND \ From 3ff2c22492d890394b5a755e07350ad6b711ced2 Mon Sep 17 00:00:00 2001 From: StevePem Date: Fri, 15 Sep 2023 14:40:41 +1000 Subject: [PATCH 028/103] comment out testing line --- functions/addElevation.R | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/functions/addElevation.R b/functions/addElevation.R index 46fb803..697b013 100644 --- a/functions/addElevation.R +++ b/functions/addElevation.R @@ -5,7 +5,7 @@ addElevation2Nodes <- function(nodes, rasterFile, multiplier=1){ } addElevation2Links <- function(network){ - network <- networkRestructured + # network <- networkRestructured nodes <- network[[1]] links <- network[[2]] From b9d05235019873e58463fc118f47bebaf94b667a Mon Sep 17 00:00:00 2001 From: StevePem Date: Fri, 15 Sep 2023 14:44:23 +1000 Subject: [PATCH 029/103] fix function parameters --- functions/addDestinations.R | 5 +++-- functions/getDestinationTypes.R | 2 +- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/functions/addDestinations.R b/functions/addDestinations.R index a0abf38..5127af4 100644 --- a/functions/addDestinations.R +++ b/functions/addDestinations.R @@ -9,7 +9,7 @@ addDestinations <- function(nodes_current, edges_current, osmPbfExtract, - outputCRS) { + outputCrs) { # nodes_current = networkDensified[[1]] # edges_current = networkDensified[[2]] @@ -98,7 +98,8 @@ addDestinations <- function(nodes_current, destination.pt <- bind_rows(destination.layer(points), # add stations (from point, polygons and lines) to point table - getStation() %>% mutate(dest_type = "railway_station")) %>% + getStation(points, polygons, lines) %>% + mutate(dest_type = "railway_station")) %>% mutate(dest_id = row_number()) destination.poly <- diff --git a/functions/getDestinationTypes.R b/functions/getDestinationTypes.R index 9010e82..d7960a3 100644 --- a/functions/getDestinationTypes.R +++ b/functions/getDestinationTypes.R @@ -129,7 +129,7 @@ getCafe <- function(layer) { # Returns list of stations as points # Note the buffer distance of 100m below; closest railway stations in Melbourne are # Riversdale & Willison (about 420m) -getStation <- function() { +getStation <- function(points, polygons, lines) { # general filter to find station objects filterStation <- function(layer) { return(layer %>% From e7296d0d9dcf1a27eeb24fcab499a53ba3044ea5 Mon Sep 17 00:00:00 2001 From: jafshin Date: Mon, 18 Sep 2023 17:44:10 +0200 Subject: [PATCH 030/103] there is an 
issue with invalid polygons, removing them for now, to be fixed --- functions/addDestinations.R | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/functions/addDestinations.R b/functions/addDestinations.R index 5127af4..eaa6a95 100644 --- a/functions/addDestinations.R +++ b/functions/addDestinations.R @@ -106,6 +106,17 @@ addDestinations <- function(nodes_current, destination.layer(polygons) %>% mutate(dest_id = max(destination.pt$dest_id) + row_number()) + # Some times polygons are not valid and have only two points. + # For now I am removing those, this needs to be fixed + invalid.poly <- NULL + for (i in 1:nrow(destination.poly) ){ + if(sapply(st_geometry(destination.poly)[[i]], function(x) nrow(x[[1]])) < 3){ + invalid.poly <- cbind(invalid.poly, i) } + } + destination.poly <- destination.poly[-c(invalid.poly),] + + destination.poly <- destination.poly[- which(!st_is_valid(destination.poly$geometry)), ] + # # check numbers of each destination type # chk <- full_join(destination.poly %>% # st_drop_geometry() %>% From 7fbd2b88c6e26f0ed90676bafa9845c06fb68b24 Mon Sep 17 00:00:00 2001 From: StevePem Date: Fri, 29 Sep 2023 16:52:17 +1000 Subject: [PATCH 031/103] improve tag key:value pair splitting and processing --- NetworkGenerator.R | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/NetworkGenerator.R b/NetworkGenerator.R index f8be832..25e2e28 100644 --- a/NetworkGenerator.R +++ b/NetworkGenerator.R @@ -4,7 +4,9 @@ makeNetwork<-function(outputFileName="test"){ # INPUT NETWORK # Set this to your desired cooridinate system for the network - outputCrs=28355 + outputCrs=28355 # Melbourne + # outputCrs=28356 # Brisbane + # A flag for whether process raw osm extract or not processOsm=F # If processOsm=T, Set this to your osm extract file name, e.g., melbourne.osm @@ -12,6 +14,7 @@ makeNetwork<-function(outputFileName="test"){ osmExtract='./data/melbourne.osm' # If procesOsm=F, set the following to the network sqlite file networkSqlite="./data/melbourne_network_unconfigured.sqlite" + # networkSqlite="./data/brisbane_network_unconfigured.sqlite" # SIMPLIFICATION shortLinkLength=20 @@ -42,7 +45,8 @@ makeNetwork<-function(outputFileName="test"){ addDestinationLayer=T # OSM extract for destinations, in .osm.pbf format osmPbfExtract="./data/melbourne_australia.osm.pbf" - + # osmPbfExtract="./data/Brisbane.osm.pbf" + # GTFS addGtfs=F gtfs_feed = "data/gtfs_au_vic_ptv_20191004.zip" # link to the GTFS .zip file From 1ae702d1196691068c94cfc0e2a2f85d95b9aa90 Mon Sep 17 00:00:00 2001 From: StevePem Date: Fri, 29 Sep 2023 16:53:16 +1000 Subject: [PATCH 032/103] commenting out invalid polygon issue for further review --- functions/addDestinations.R | 22 ++++++++++++++-------- 1 file changed, 14 insertions(+), 8 deletions(-) diff --git a/functions/addDestinations.R b/functions/addDestinations.R index eaa6a95..e3463a4 100644 --- a/functions/addDestinations.R +++ b/functions/addDestinations.R @@ -106,16 +106,22 @@ addDestinations <- function(nodes_current, destination.layer(polygons) %>% mutate(dest_id = max(destination.pt$dest_id) + row_number()) - # Some times polygons are not valid and have only two points. + # [AJ] Some times polygons are not valid and have only two points. 
# For now I am removing those, this needs to be fixed - invalid.poly <- NULL - for (i in 1:nrow(destination.poly) ){ - if(sapply(st_geometry(destination.poly)[[i]], function(x) nrow(x[[1]])) < 3){ - invalid.poly <- cbind(invalid.poly, i) } - } - destination.poly <- destination.poly[-c(invalid.poly),] + + # [SP to AJ] The 'invalid.poly' part of this throws errors for Brisbane for + # multipart geometries; commented out for now; while 2-point polygons + # are clearly wrongly digitised, do they matter? + + # invalid.poly <- NULL + # for (i in 1:nrow(destination.poly) ){ + # if(sapply(st_geometry(destination.poly)[[i]], function(x) nrow(x[[1]])) < 3){ + # invalid.poly <- cbind(invalid.poly, i) } + # } + # destination.poly <- destination.poly[-c(invalid.poly),] - destination.poly <- destination.poly[- which(!st_is_valid(destination.poly$geometry)), ] + # destination.poly <- destination.poly[- which(!st_is_valid(destination.poly$geometry)), ] + destination.poly <- destination.poly[which(st_is_valid(destination.poly$geometry)), ] # # check numbers of each destination type # chk <- full_join(destination.poly %>% From 0fde0fad1e90168934d8782e050bb1e9dca30697 Mon Sep 17 00:00:00 2001 From: StevePem Date: Fri, 29 Sep 2023 17:01:07 +1000 Subject: [PATCH 033/103] Revert "improve tag key:value pair splitting and processing" This reverts commit 95099c55cf88634d94bfebb3c45e92bc2c64e81e. --- NetworkGenerator.R | 8 ++------ 1 file changed, 2 insertions(+), 6 deletions(-) diff --git a/NetworkGenerator.R b/NetworkGenerator.R index 25e2e28..f8be832 100644 --- a/NetworkGenerator.R +++ b/NetworkGenerator.R @@ -4,9 +4,7 @@ makeNetwork<-function(outputFileName="test"){ # INPUT NETWORK # Set this to your desired cooridinate system for the network - outputCrs=28355 # Melbourne - # outputCrs=28356 # Brisbane - + outputCrs=28355 # A flag for whether process raw osm extract or not processOsm=F # If processOsm=T, Set this to your osm extract file name, e.g., melbourne.osm @@ -14,7 +12,6 @@ makeNetwork<-function(outputFileName="test"){ osmExtract='./data/melbourne.osm' # If procesOsm=F, set the following to the network sqlite file networkSqlite="./data/melbourne_network_unconfigured.sqlite" - # networkSqlite="./data/brisbane_network_unconfigured.sqlite" # SIMPLIFICATION shortLinkLength=20 @@ -45,8 +42,7 @@ makeNetwork<-function(outputFileName="test"){ addDestinationLayer=T # OSM extract for destinations, in .osm.pbf format osmPbfExtract="./data/melbourne_australia.osm.pbf" - # osmPbfExtract="./data/Brisbane.osm.pbf" - + # GTFS addGtfs=F gtfs_feed = "data/gtfs_au_vic_ptv_20191004.zip" # link to the GTFS .zip file From 9f61b7607991957f103c9a6a688158f3d638e6c2 Mon Sep 17 00:00:00 2001 From: StevePem Date: Fri, 29 Sep 2023 17:14:31 +1000 Subject: [PATCH 034/103] improve tag key:value pair splitting and processing --- functions/processOsmTags.R | 31 +++++++++++++++++++------------ 1 file changed, 19 insertions(+), 12 deletions(-) diff --git a/functions/processOsmTags.R b/functions/processOsmTags.R index a1797e4..c88ae5c 100644 --- a/functions/processOsmTags.R +++ b/functions/processOsmTags.R @@ -14,8 +14,11 @@ processOsmTags <- function(osm_df,this_defaults_df){ # this_defaults_df <- defaults_df osmWithDefaults <- inner_join(osm_df,this_defaults_df,by="highway") - # pre splitting the tags to save time - tagList <- strsplit(gsub('=>',',', gsub('"', '', osmWithDefaults$other_tags)),',') + + # pre splitting the tags to save time: replace tag separator "," with unique + # symbol ',,'; replace key:value separator "=>" with same 
unique symbol ',,'; + # remove " at start and end of tag string; then split at the unique symbol ',,' + tagList <-strsplit(gsub('"', '', gsub('"=>"', ',,', gsub('","', '",,"', osmWithDefaults$other_tags))), ',,') osmWithDefaults <- osmWithDefaults %>% mutate(cycleway=ifelse(highway=="cycleway",4,0)) %>% @@ -27,21 +30,25 @@ processOsmTags <- function(osm_df,this_defaults_df){ if (length(tags)>1) { - cycleway_tags <- tags[which(tags %like% "cycleway")+1] + # keys and values are odd and even-numbered tags respectively + keys <- tags[seq(1, length(tags), by = 2)] + values <- tags[seq(2, length(tags), by = 2)] + + cycleway_tags <- values[which(keys %like% "cycleway")] if(any(is.na(cycleway_tags))) cycleway_tags <- c() - bicycle_tags <- tags[which(tags=="bicycle")+1] + bicycle_tags <- values[which(keys=="bicycle")] if(any(is.na(bicycle_tags))) bicycle_tags <- c() - car_tags <- tags[which(tags %in% c("car","motor_vehicle"))+1] + car_tags <- values[which(keys %in% c("car","motor_vehicle"))] if(any(is.na(car_tags))) car_tags <- c() - foot_tags <- tags[which(tags %like% "foot")+1] + foot_tags <- values[which(keys %like% "foot")] if(any(is.na(foot_tags))) foot_tags <- c() - surface_tags <- tags[which(tags=="surface")+1] + surface_tags <- values[which(keys=="surface")] if(any(is.na(surface_tags))) surface_tags <- c() - oneway_tags <- as.character(tags[which(tags=="oneway")+1]) + oneway_tags <- as.character(values[which(keys=="oneway")]) if(length(oneway_tags)==0) oneway_tags <- c() - if("maxspeed" %in% tags) { - maxSpeed=as.integer(tags[which(tags=="maxspeed")+1]) + if("maxspeed" %in% keys) { + maxSpeed=as.integer(values[which(keys=="maxspeed")]) # added this as some links had weird "masxspeed" values such as 500km/h! # 150km/h limit might cause issues for autobahns in Germany, AJ Jan 2021. 
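    # values above 140 km/h are therefore treated as tagging errors rather
    # than genuine speed limits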
    if(!(is.na(maxSpeed)) & 140 < maxSpeed){
@@ -55,8 +62,8 @@ processOsmTags <- function(osm_df,this_defaults_df){
        df$freespeed[1]=freeSpeed
      }
    }
-    if("lanes" %in% tags) {
-      newLanes=as.integer(tags[which(tags=="lanes")+1])
+    if("lanes" %in% keys) {
+      newLanes=as.integer(values[which(keys=="lanes")])
      # some osm tags set the number of lanes to zero
      # added is.na since one of the lanes has a value of "2; 3"
      if(!is.na(newLanes) & newLanes > 0) {

From 2096413b2f2a9fec3d4b94d705e7867bfd97d521 Mon Sep 17 00:00:00 2001
From: StevePem
Date: Fri, 29 Sep 2023 21:24:41 +1000
Subject: [PATCH 035/103] add ndvi to network

---
 NetworkGenerator.R          | 17 ++++++++++++++++
 functions/addNDVI.R         | 39 +++++++++++++++++++++++++++++++++
 functions/addSlopeImped.R   |  2 +-
 functions/makeEdgesOneway.R |  3 +++
 functions/restructureData.R |  7 ++++---
 5 files changed, 64 insertions(+), 4 deletions(-)
 create mode 100644 functions/addNDVI.R

diff --git a/NetworkGenerator.R b/NetworkGenerator.R
index f8be832..7b121a6 100644
--- a/NetworkGenerator.R
+++ b/NetworkGenerator.R
@@ -42,6 +42,14 @@ makeNetwork<-function(outputFileName="test"){
   addDestinationLayer=T
   # OSM extract for destinations, in .osm.pbf format
   osmPbfExtract="./data/melbourne_australia.osm.pbf"
+  
+  # NDVI
+  # A flag for whether to add NDVI or not
+  addNDVI=T
+  # NDVI file - make sure it is in the same coordinate system as your network
+  ndviFile="./data/NDVI_1600mBuffer_Melbourne_reprojected.tif"
+  # Buffer distance for finding average NDVI for links
+  ndviBuffDist=30
   
   # GTFS
   addGtfs=F
@@ -67,6 +75,7 @@ makeNetwork<-function(outputFileName="test"){
   library(stringr)
   library(igraph)
   library(raster)
+  library(terra)
   library(rgdal)
   library(purrr)
   library(lwgeom)
@@ -96,6 +105,7 @@ makeNetwork<-function(outputFileName="test"){
   echo(paste0("- Cropping to a test area: ", crop2Area,"\n"))
   echo(paste0("- Shortest link length in network simplification: ", shortLinkLength,"\n"))
   echo(paste0("- Adding elevation: ", addElevation,"\n"))
+  echo(paste0("- Adding NDVI: ", addNDVI,"\n"))
   echo(paste0("- Adding PT from GTFS: ", addGtfs,"\n"))
   echo(paste0("- Writing outputs in SQLite format: ", writeSqlite,"\n"))
   echo(paste0("- Writing outputs in ShapeFile format: ", writeShp,"\n"))
@@ -228,6 +238,13 @@ makeNetwork<-function(outputFileName="test"){
   networkDensified <- densifyNetwork(networkConnected,desnificationMaxLengh,
                                      densifyBikeways)
   
+  # Adding NDVI to links
+  if(addNDVI) {
+    system.time(networkDensified[[2]] <- addNDVI2Links(networkDensified[[2]],
+                                                       ndviFile,
+                                                       ndviBuffDist))
+  }
+  
   # adding destinations layer
   if (addDestinationLayer) {
     destinations <- addDestinations(networkDensified[[1]],
diff --git a/functions/addNDVI.R b/functions/addNDVI.R
new file mode 100644
index 0000000..18a73dc
--- /dev/null
+++ b/functions/addNDVI.R
@@ -0,0 +1,39 @@
+# function to add NDVI to links, where NDVI is the average of the NDVI
+# values within a buffer (ndviBuffDist, default 30m) of the links
+
+addNDVI2Links <- function(links, ndviFile, ndviBuffDist) {
+  
+  # links = networkRestructured[[2]]
+  # ndviFile = "./data/NDVI_1600mBuffer_Melbourne_reprojected.tif"
+  # ndviBuffDist = 30
+  
+  echo("Reading in the NDVI file\n")
+  
+  # read in NDVI file
+  ndvi <- rast(ndviFile)
+  
+  # buffer each link by ndviBuffDist
+  links.buffered <- st_buffer(links, ndviBuffDist)
+  
+  echo(paste0("Finding NDVI values within ", ndviBuffDist, "m of each link\n"))
+  
+  # extract the NDVI values for the buffered links - produces a table with
+  # 2 columns, ID (which is the row number from links.buffered) and NDVI
+  ndvi_values <- terra::extract(ndvi, links.buffered)
+  
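+  # note: terra::extract with polygon inputs returns one row per raster cell
+  # within each buffer, so a link whose buffer covers several cells appears
+  # as several rows sharing the same ID; these are averaged below
+  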
echo(paste("Finding mean pf NDVI values for each link\n")) + + # find the mean of the values for each link + ndvi_values_mean <- ndvi_values %>% + group_by(ID) %>% + summarise(ndvi = mean(NDVI, na.rm = TRUE)) + + # join to the links, using the row number and ID + links.with.ndvi <- links %>% + mutate(row_no = row_number()) %>% + left_join(., ndvi_values_mean, by = c("row_no" = "ID")) %>% + dplyr::select(-row_no) + + return(links.with.ndvi) + +} \ No newline at end of file diff --git a/functions/addSlopeImped.R b/functions/addSlopeImped.R index c5ee42e..a700a7c 100644 --- a/functions/addSlopeImped.R +++ b/functions/addSlopeImped.R @@ -12,7 +12,7 @@ addSlopeImped <- function(nodes_current, edges_current) { edges_current <- edges_current %>% - # some coastal links are missing elevation; make slope 0 + # some links (eg coastal) are missing elevation; make slope 0 mutate(slope_pct = ifelse(is.na(slope_pct), 0, slope_pct)) %>% # 50m of length per 1m of climb diff --git a/functions/makeEdgesOneway.R b/functions/makeEdgesOneway.R index 66a2878..b6e4bb9 100644 --- a/functions/makeEdgesOneway.R +++ b/functions/makeEdgesOneway.R @@ -46,6 +46,9 @@ makeEdgesOneway <- function(nodes_current, edges_current) { if ("slope_pct" %in% colnames(edges_twoway_reversed)) { required_fields <- c(required_fields, "slope_pct") } + if ("ndvi" %in% colnames(edges_twoway_reversed)) { + required_fields <- c(required_fields, "ndvi") + } edges_twoway_reversed <- edges_twoway_reversed %>% dplyr::select(all_of(required_fields)) diff --git a/functions/restructureData.R b/functions/restructureData.R index 824302f..26ec96e 100644 --- a/functions/restructureData.R +++ b/functions/restructureData.R @@ -65,9 +65,10 @@ restructureData <- function(networkList, highway_lookup, mutate(cycleway=ifelse(cycleway==2, "simple_lane" , cycleway)) %>% mutate(cycleway=ifelse(cycleway==1, "shared_street" , cycleway)) %>% mutate(cycleway=ifelse(cycleway==0, NA , cycleway)) %>% - dplyr::select(from_id, to_id, fromX, fromY, toX, toY, length, freespeed, - permlanes, capacity, highway, is_oneway, cycleway, surface, - is_cycle, is_walk, is_car, modes) %>% + dplyr::select(any_of(c("from_id", "to_id", "fromX", "fromY", "toX", "toY", + "length", "freespeed", "permlanes", "capacity", + "highway", "is_oneway", "cycleway", "surface", + "is_cycle", "is_walk", "is_car", "modes", "ndvi"))) %>% mutate(id=NA) %>% relocate(id) From ebfab62b9c28f738bf5b97aacbd34c551c92a842 Mon Sep 17 00:00:00 2001 From: StevePem Date: Sat, 30 Sep 2023 10:09:36 +1000 Subject: [PATCH 036/103] group city specific parameters together --- NetworkGenerator.R | 68 ++++++++++++++++++++++++++++++++-------------- 1 file changed, 48 insertions(+), 20 deletions(-) diff --git a/NetworkGenerator.R b/NetworkGenerator.R index 7b121a6..fb5e9bf 100644 --- a/NetworkGenerator.R +++ b/NetworkGenerator.R @@ -2,23 +2,58 @@ makeNetwork<-function(outputFileName="test"){ # outputFileName="network" # Parameters -------------------------------------------------------------- + # CITY AND ITS PARAMETERS + # Set city + city = "Melbourne" + # city = "Brisbane" + + # City parameters to be set + # • outputCrs: desired coordinate system for network + # • osmExtract: if 'processOsm=T', OSM extract file in .osm format (.osm.pbf + # not supported for this step) + # • networkSqlite: if 'processOsm=F', network sqlite file + # • cropAreaPoly: if 'crop2TestArea=T' cropArea location from + # https://github.com/JamesChevalier/cities/tree/master/australia/victoria + # (only supported for Victoria at this stage) + # • 
demFile: if 'addElevation=T', digital elevation model raster file (must be + # in same coordinate system as network) + # • osmPbfExtract: if 'addDestinationLayer=T', OSM extract for destinations, + # in .osm.pbf format + # • ndviFile: if 'addNDVI=T', raster file with NDVI values (must be in same + # coordinate system as network) + # • gtfs_feed: if 'addGtfs=T', zip file containing GTFS data (must also set + # start and end dates in GTFS section) + + if (city == "Melbourne") { + outputCrs = 28355 + osmExtract = "./data/melbourne.osm" + networkSqlite = "./data/melbourne_network_unconfigured.sqlite" + cropAreaPoly = "city-of-melbourne_victoria" + demFile = "./data/DEM_melbourne.tif" + osmPbfExtract = "./data/melbourne_australia.osm.pbf" + ndviFile = "./data/NDVI_1600mBuffer_Melbourne_reprojected.tif" + gtfs_feed = "data/gtfs_au_vic_ptv_20191004.zip" + + } else if (city == "Brisbane") { + outputCrs = 28356 + osmExtract = "" # must set 'processOsm=F' + networkSqlite = "./data/brisbane_network_unconfigured.sqlite" + cropAreaPoly = "" # must set 'crop2TestArea=F' + demFile = "./data/5m_DEM_reprojected.tif" # MIGHT NOT BE FINAL FILE + osmPbfExtract = "./data/brisbane_australia.osm.pbf" + ndviFile = "" # must set 'addNDVI=F' + gtfs_feed = "" # must set 'addGtfs=F' + + } + # INPUT NETWORK - # Set this to your desired cooridinate system for the network - outputCrs=28355 - # A flag for whether process raw osm extract or not + # A flag for whether process raw osm extract or not (if not, must have network sqlite) processOsm=F - # If processOsm=T, Set this to your osm extract file name, e.g., melbourne.osm - # Note that osm.pbf format is not yet supported - osmExtract='./data/melbourne.osm' - # If procesOsm=F, set the following to the network sqlite file - networkSqlite="./data/melbourne_network_unconfigured.sqlite" # SIMPLIFICATION shortLinkLength=20 minDangleLinkLengh=500 crop2Area=F - # If crop2TestArea=T, find your area from https://github.com/JamesChevalier/cities/tree/master/australia/victoria and set the following to its poly name - cropAreaPoly="city-of-melbourne_victoria" # DENSIFICATION desnificationMaxLengh=500 @@ -32,28 +67,20 @@ makeNetwork<-function(outputFileName="test"){ # ELEVATION # A flag for whether to add elevation or not addElevation=T - # Digital elevation model file - make sure it is in the same coordinate system as your network - demFile= "./data/DEM_melbourne.tif" - # DEM's multiplier- set to 1 if DEM contains actual elevation ElevationMultiplier=1 # DESTINATIONS # A flag for whether to add a destinations layer (drawn from OSM) or not addDestinationLayer=T - # OSM extract for destinations, in .osm.pbf format - osmPbfExtract="./data/melbourne_australia.osm.pbf" - + # NDVI # A flag for whether to add NDVI or not addNDVI=T - # NDVI file - make sure it is in the same coordinate system as your network - ndviFile="./data/NDVI_1600mBuffer_Melbourne_reprojected.tif" # Buffer distance for finding average NDVI for links ndviBuffDist=30 # GTFS addGtfs=F - gtfs_feed = "data/gtfs_au_vic_ptv_20191004.zip" # link to the GTFS .zip file analysis_start = as.Date("2019-10-11","%Y-%m-%d") # Transit Feed start date analysis_end = as.Date("2019-10-17","%Y-%m-%d") # Transit Feed end date @@ -105,6 +132,7 @@ makeNetwork<-function(outputFileName="test"){ echo(paste0("- Cropping to a test area: ", crop2Area,"\n")) echo(paste0("- Shortest link length in network simplification: ", shortLinkLength,"\n")) echo(paste0("- Adding elevation: ", addElevation,"\n")) + echo(paste0("- Adding destination layer: ", 
addDestinationLayer,"\n")) echo(paste0("- Adding NDVI: ", addNDVI,"\n")) echo(paste0("- Adding PT from GTFS: ", addGtfs,"\n")) echo(paste0("- Writing outputs in SQLite format: ", writeSqlite,"\n")) @@ -279,7 +307,7 @@ makeNetwork<-function(outputFileName="test"){ } # Adding PT pseudo-network based on GTFS - # Adjust your analysis start date, end data and gtfs feed name below + # Adjust your analysis start date, end data and gtfs feed name above if(addGtfs) { # Adjust these parameters based on your GTFS file if(file.exists("data/studyRegion.sqlite")){ From 6cdc3e5053e38fee620342c2dab6e26605d93dcc Mon Sep 17 00:00:00 2001 From: StevePem Date: Thu, 12 Oct 2023 19:41:43 +1100 Subject: [PATCH 037/103] invalid polygon error detection --- functions/addDestinations.R | 16 +--------------- 1 file changed, 1 insertion(+), 15 deletions(-) diff --git a/functions/addDestinations.R b/functions/addDestinations.R index e3463a4..a5999bc 100644 --- a/functions/addDestinations.R +++ b/functions/addDestinations.R @@ -106,21 +106,7 @@ addDestinations <- function(nodes_current, destination.layer(polygons) %>% mutate(dest_id = max(destination.pt$dest_id) + row_number()) - # [AJ] Some times polygons are not valid and have only two points. - # For now I am removing those, this needs to be fixed - - # [SP to AJ] The 'invalid.poly' part of this throws errors for Brisbane for - # multipart geometries; commented out for now; while 2-point polygons - # are clearly wrongly digitised, do they matter? - - # invalid.poly <- NULL - # for (i in 1:nrow(destination.poly) ){ - # if(sapply(st_geometry(destination.poly)[[i]], function(x) nrow(x[[1]])) < 3){ - # invalid.poly <- cbind(invalid.poly, i) } - # } - # destination.poly <- destination.poly[-c(invalid.poly),] - - # destination.poly <- destination.poly[- which(!st_is_valid(destination.poly$geometry)), ] + # Remove any invalid polygons as they may cause errors destination.poly <- destination.poly[which(st_is_valid(destination.poly$geometry)), ] # # check numbers of each destination type From 8733207bda31bb2c4bc4b89bcabc2840b0f4195b Mon Sep 17 00:00:00 2001 From: StevePem Date: Fri, 27 Oct 2023 13:09:48 +1100 Subject: [PATCH 038/103] retain destination area and location details --- functions/addDestinations.R | 15 +++++++++++---- 1 file changed, 11 insertions(+), 4 deletions(-) diff --git a/functions/addDestinations.R b/functions/addDestinations.R index a5999bc..fa87522 100644 --- a/functions/addDestinations.R +++ b/functions/addDestinations.R @@ -93,18 +93,25 @@ addDestinations <- function(nodes_current, # ----------------------------------# echo("Finding destinations and their nearby nodes\n") - # create tables for points and polygons, and allocate unique id's (so features - # multiple multiple nodes can be grouped by the id where required) + # create tables for points and polygons, allocate unique id's (so features + # multiple multiple nodes can be grouped by the id where required), + # and store area and location details destination.pt <- bind_rows(destination.layer(points), # add stations (from point, polygons and lines) to point table getStation(points, polygons, lines) %>% mutate(dest_type = "railway_station")) %>% - mutate(dest_id = row_number()) + mutate(dest_id = row_number(), + area_m2 = 0, + centroid_x = st_coordinates(.)[, 1], + centroid_y = st_coordinates(.)[, 2]) destination.poly <- destination.layer(polygons) %>% - mutate(dest_id = max(destination.pt$dest_id) + row_number()) + mutate(dest_id = max(destination.pt$dest_id) + row_number(), + area_m2 = 
as_numeric(st_area(.)), + centroid_x = st_coordinates(st_centroid(.))[, 1], + centroid_y = st_coordinates(st_centroid(.))[, 2]) # Remove any invalid polygons as they may cause errors destination.poly <- destination.poly[which(st_is_valid(destination.poly$geometry)), ] From 89324f51243dbc1346674cda08c8a10f4ef04718 Mon Sep 17 00:00:00 2001 From: StevePem Date: Tue, 7 Nov 2023 13:43:45 +1100 Subject: [PATCH 039/103] parking POIs --- functions/addDestinations.R | 11 ++++++----- functions/getDestinationTypes.R | 10 +++++++++- 2 files changed, 15 insertions(+), 6 deletions(-) diff --git a/functions/addDestinations.R b/functions/addDestinations.R index fa87522..e5c00f5 100644 --- a/functions/addDestinations.R +++ b/functions/addDestinations.R @@ -40,9 +40,9 @@ addDestinations <- function(nodes_current, # some don't exist for some layer types extra.tags <- c("access", "amenity", "building", "grades", "healthcare", "healthcare:speciality", "isced:level", "landuse", "leisure", - "network", "operator", "operator:type", "public_transport", - "railway", "school", "shop", "social_facility", "sport", - "tourism", "train") + "network", "operator", "operator:type", "parking", + "public_transport", "railway", "school", "shop", + "social_facility", "sport", "tourism", "train") # oe_vectortranslate(osmPbfExtract, layer = "multipolygons", extra_tags = extra.tags) # oe_vectortranslate(osmPbfExtract, layer = "points", extra_tags = extra.tags) # oe_vectortranslate(osmPbfExtract, layer = "lines", extra_tags = extra.tags) @@ -85,7 +85,8 @@ addDestinations <- function(nodes_current, getPost(layer) %>% mutate(dest_type = "post_office"), getBank(layer) %>% mutate(dest_type = "bank"), getRestaurant(layer) %>% mutate(dest_type = "restaurant"), - getCafe(layer) %>% mutate(dest_type = "cafe") + getCafe(layer) %>% mutate(dest_type = "cafe"), + getParking(layer) %>% mutate(dest_type = "parking") )) } @@ -109,7 +110,7 @@ addDestinations <- function(nodes_current, destination.poly <- destination.layer(polygons) %>% mutate(dest_id = max(destination.pt$dest_id) + row_number(), - area_m2 = as_numeric(st_area(.)), + area_m2 = as.numeric(st_area(.)), centroid_x = st_coordinates(st_centroid(.))[, 1], centroid_y = st_coordinates(st_centroid(.))[, 2]) diff --git a/functions/getDestinationTypes.R b/functions/getDestinationTypes.R index d7960a3..526f295 100644 --- a/functions/getDestinationTypes.R +++ b/functions/getDestinationTypes.R @@ -125,7 +125,15 @@ getCafe <- function(layer) { return(layer %>% filter(amenity == "cafe")) } -# 8 railway stations ---- + +# 8 parking ---- +getParking <- function(layer) { + return(layer %>% filter(amenity == "parking" & + !access %in% c("no", "private"))) +} + + +# 9 railway stations ---- # Returns list of stations as points # Note the buffer distance of 100m below; closest railway stations in Melbourne are # Riversdale & Willison (about 420m) From 8fd780f2ab2feddafee364b00b6995316dbfb595 Mon Sep 17 00:00:00 2001 From: StevePem Date: Tue, 9 Jan 2024 11:19:12 +1100 Subject: [PATCH 040/103] Destinations - PT from GTFS and other improvements --- NetworkGenerator.R | 21 +++++++-- functions/addDestinations.R | 30 ++++++++---- functions/addNDVI.R | 12 ++++- functions/getDestinationTypes.R | 64 +++++++++++++------------ functions/getPTStops.R | 83 +++++++++++++++++++++++++++++++++ 5 files changed, 164 insertions(+), 46 deletions(-) create mode 100644 functions/getPTStops.R diff --git a/NetworkGenerator.R b/NetworkGenerator.R index fb5e9bf..1d07c57 100644 --- a/NetworkGenerator.R +++ b/NetworkGenerator.R @@ 
-6,6 +6,7 @@ makeNetwork<-function(outputFileName="test"){ # Set city city = "Melbourne" # city = "Brisbane" + # city = "Munich" # City parameters to be set # • outputCrs: desired coordinate system for network @@ -21,8 +22,8 @@ makeNetwork<-function(outputFileName="test"){ # in .osm.pbf format # • ndviFile: if 'addNDVI=T', raster file with NDVI values (must be in same # coordinate system as network) - # • gtfs_feed: if 'addGtfs=T', zip file containing GTFS data (must also set - # start and end dates in GTFS section) + # • gtfs_feed: if 'addGtfs=T' or 'addDestinationLayer=T, zip file containing + # GTFS data (and, if 'addGtfs=T', also set start and end dates in GTFS section) if (city == "Melbourne") { outputCrs = 28355 @@ -32,7 +33,7 @@ makeNetwork<-function(outputFileName="test"){ demFile = "./data/DEM_melbourne.tif" osmPbfExtract = "./data/melbourne_australia.osm.pbf" ndviFile = "./data/NDVI_1600mBuffer_Melbourne_reprojected.tif" - gtfs_feed = "data/gtfs_au_vic_ptv_20191004.zip" + gtfs_feed = "./data/gtfs.zip" } else if (city == "Brisbane") { outputCrs = 28356 @@ -42,8 +43,18 @@ makeNetwork<-function(outputFileName="test"){ demFile = "./data/5m_DEM_reprojected.tif" # MIGHT NOT BE FINAL FILE osmPbfExtract = "./data/brisbane_australia.osm.pbf" ndviFile = "" # must set 'addNDVI=F' - gtfs_feed = "" # must set 'addGtfs=F' + gtfs_feed = "./data/SEQ_GTFS.zip" + } else if (city == "Munich") { + outputCrs = 25832 + osmExtract = "" # must set 'processOsm=F' + networkSqlite = "./data/munich_network_unconfigured.sqlite" + cropAreaPoly = "" # must set 'crop2TestArea=F' + demFile = "" # must set 'addElevation=F' + osmPbfExtract = "./data/munich_germany.osm.pbf" + ndviFile = "" # must set 'addNDVI=F' + gtfs_feed = "./data/mvv_gtfs.zip" # to test; if not then >> # must set 'addGtfs=F' and 'addDestinationLayer=F' + } # INPUT NETWORK @@ -278,6 +289,8 @@ makeNetwork<-function(outputFileName="test"){ destinations <- addDestinations(networkDensified[[1]], networkDensified[[2]], osmPbfExtract, + city, + gtfs_feed, outputCrs) } diff --git a/functions/addDestinations.R b/functions/addDestinations.R index e5c00f5..bbe9d57 100644 --- a/functions/addDestinations.R +++ b/functions/addDestinations.R @@ -7,19 +7,23 @@ # in 'getDestinationTypes.R' addDestinations <- function(nodes_current, - edges_current, - osmPbfExtract, - outputCrs) { + edges_current, + osmPbfExtract, + city, + gtfs_feed, + outputCrs) { # nodes_current = networkDensified[[1]] # edges_current = networkDensified[[2]] # osmPbfExtract = "./data/melbourne_australia.osm.pbf" + # city = "Melbourne" + # gtfs_feed = "./data/gtfs.zip" # outputCrs = 28355 # # check layers # st_layers(osmPbfExtract) # # only multipolygons, points and lines are required (not multilinestrings - # # or other_relations) + # # or other_relations) [and lines not required when using GTFS for PT stops] # # check keys # options(max.print = 2000) @@ -55,8 +59,8 @@ addDestinations <- function(nodes_current, st_transform(outputCrs) points <- oe_read(osmPbfExtract, layer = "points", extra_tags = extra.tags) %>% st_transform(outputCrs) - lines <- oe_read(osmPbfExtract, layer = "lines", extra_tags = extra.tags) %>% - st_transform(outputCrs) + # lines <- oe_read(osmPbfExtract, layer = "lines", extra_tags = extra.tags) %>% + # st_transform(outputCrs) # function to extract specific destination types from point or polygon layers ---- @@ -99,14 +103,20 @@ addDestinations <- function(nodes_current, # and store area and location details destination.pt <- bind_rows(destination.layer(points), - # add 
stations (from point, polygons and lines) to point table - getStation(points, polygons, lines) %>% - mutate(dest_type = "railway_station")) %>% + + # # add stations (from point, polygons and lines) to point table + # getStation(points, polygons, lines) %>% + # mutate(dest_type = "railway_station")) %>% + + # add PT stops (from GTFS feed) to point table + getPTStops(city, gtfs_feed, outputCrs, edges_current) %>% + mutate(dest_type = "pt_stop")) %>% + mutate(dest_id = row_number(), area_m2 = 0, centroid_x = st_coordinates(.)[, 1], centroid_y = st_coordinates(.)[, 2]) - + destination.poly <- destination.layer(polygons) %>% mutate(dest_id = max(destination.pt$dest_id) + row_number(), diff --git a/functions/addNDVI.R b/functions/addNDVI.R index 18a73dc..c9f077f 100644 --- a/functions/addNDVI.R +++ b/functions/addNDVI.R @@ -3,7 +3,7 @@ addNDVI2Links <- function(links, ndviFile, ndviBuffDist) { - # links = networkRestructured[[2]] + # links = networkDensified[[2]] # ndviFile = "./data/NDVI_1600mBuffer_Melbourne_reprojected.tif" # ndviBuffDist = 30 @@ -28,11 +28,21 @@ addNDVI2Links <- function(links, ndviFile, ndviBuffDist) { group_by(ID) %>% summarise(ndvi = mean(NDVI, na.rm = TRUE)) + # find the mean AND OTHER VALUES of the values for each link + ndvi_values_mean <- ndvi_values %>% + group_by(ID) %>% + summarise(ndvi = mean(NDVI, na.rm = TRUE), + ndvi_md = median(NDVI, na.rm = TRUE), + ndvi_75 = quantile(NDVI, na.rm = TRUE, probs = 0.75), + ndvi_90 = quantile(NDVI, na.rm = TRUE, probs = 0.9)) + # join to the links, using the row number and ID links.with.ndvi <- links %>% mutate(row_no = row_number()) %>% left_join(., ndvi_values_mean, by = c("row_no" = "ID")) %>% dplyr::select(-row_no) + + # st_write(links.with.ndvi, "./SP_working/links_with_NDVI.sqlite", delete_layer = TRUE) return(links.with.ndvi) diff --git a/functions/getDestinationTypes.R b/functions/getDestinationTypes.R index 526f295..35b0278 100644 --- a/functions/getDestinationTypes.R +++ b/functions/getDestinationTypes.R @@ -133,35 +133,37 @@ getParking <- function(layer) { } -# 9 railway stations ---- -# Returns list of stations as points -# Note the buffer distance of 100m below; closest railway stations in Melbourne are -# Riversdale & Willison (about 420m) -getStation <- function(points, polygons, lines) { - # general filter to find station objects - filterStation <- function(layer) { - return(layer %>% - filter((public_transport == "station" | public_transport == "stop_position") & - (railway == "station" | railway == "stop" | train == "yes" | - grepl("train", tolower(network)) | grepl("train", tolower(operator))) & - (is.na(tourism) | tourism != "yes") & - (is.na(railway) | railway != "construction"))) - } - - # find each object, and buffer to 100m - buff.dist <- 100 - station.pt <- filterStation(points) %>% st_buffer(buff.dist) - station.poly <- filterStation(polygons) %>% st_buffer(buff.dist) - station.line <- filterStation(lines) %>% st_buffer(buff.dist) - - # dissolve, then separate to individual polygons - stations <- bind_rows(station.pt, station.poly, station.line) %>% - st_union() %>% - st_as_sf() %>% - st_cast("POLYGON") %>% - st_centroid() %>% - # label geometry column - rename(geometry = x) - -} +# 9 railway stations [not used] ---- +# See getPtStops.R instead + +# # Returns list of stations as points +# # Note the buffer distance of 100m below; closest railway stations in Melbourne are +# # Riversdale & Willison (about 420m) +# getStation <- function(points, polygons, lines) { +# # general filter to find station 
objects +# filterStation <- function(layer) { +# return(layer %>% +# filter((public_transport == "station" | public_transport == "stop_position") & +# (railway == "station" | railway == "stop" | train == "yes" | +# grepl("train", tolower(network)) | grepl("train", tolower(operator))) & +# (is.na(tourism) | tourism != "yes") & +# (is.na(railway) | railway != "construction"))) +# } +# +# # find each object, and buffer to 100m +# buff.dist <- 100 +# station.pt <- filterStation(points) %>% st_buffer(buff.dist) +# station.poly <- filterStation(polygons) %>% st_buffer(buff.dist) +# station.line <- filterStation(lines) %>% st_buffer(buff.dist) +# +# # dissolve, then separate to individual polygons +# stations <- bind_rows(station.pt, station.poly, station.line) %>% +# st_union() %>% +# st_as_sf() %>% +# st_cast("POLYGON") %>% +# st_centroid() %>% +# # label geometry column +# rename(geometry = x) +# +# } diff --git a/functions/getPTStops.R b/functions/getPTStops.R new file mode 100644 index 0000000..96eb1b6 --- /dev/null +++ b/functions/getPTStops.R @@ -0,0 +1,83 @@ +# read in PT stops from GTFS + +# requires tidytransit (loaded in NetworkGenerator.R) + +getPTStops <- function(city, gtfs_feed, outputCrs, nodes_current) { + # city = "Melbourne" + # gtfs_feed = "./data/gtfs.zip" + # outputCrs = 28355 + # edges_current = networkDensified[[2]] + + # read in GTFS feed + gtfs <- read_gtfs(gtfs_feed) %>% + gtfs_as_sf(., crs = 4326) + + # extract stops with their locations + stops <- gtfs$stops %>% + st_transform(outputCrs) + + # limit to stops within the study area (convex hull of edges) + stops <- stops %>% + st_filter(., st_convex_hull(st_union(edges_current)), + predicate = st_intersects) + + # table of stops and route types + stops.routetypes <- gtfs$stop_times %>% + left_join(gtfs$trips, by = "trip_id") %>% + left_join(gtfs$routes, by = "route_id") %>% + + # keep only distinct stop_id and route_type combinations + dplyr::select(stop_id, route_type) %>% + distinct() + + # apply route types + route_types = stops.routetypes$route_type %>% unique() %>% sort() + + if (city == "ProvisionForMunich") { # test should be city is Munich AND stops are in the expected list + + + } else if (!all(route_types %in% c("0", "1", "2", "3", "4", "5", "6", "7", "11", "12"))) { + message("GTFS Feed contains the following route type codes: ", paste(route_types, collapse = ", "), ". Unable to process these using +the standard route codes from https://developers.google.com/transit/gtfs/reference, which are: + 0-tram, 1-metro, 2-train, 3-bus, 4-ferry, 5-cable tram, 6-cable car, 7-funicular, 11-trolleybus, 12 monorail. +Edit getPTStops.R to specify the meanings of the codes used in the GTFS Feed. +PT stops will not be included in destinations.") + stops.found = FALSE + + } else { + message("GTFS Feed contains the following route type codes: ", paste(route_types, collapse = ", "), ". +Using standard route_type codes from https://developers.google.com/transit/gtfs/reference: + 0-tram, 1-metro, 2-train, 3-bus, 4-ferry, 5-cable tram, 6-cable car, 7-funicular, 11-trolleybus, 12 monorail. 
+Check that these match the codes used in your GTFS feed.") + stops.routetypes.coded <- stops.routetypes %>% + mutate(pt_stop_type = case_when( + route_type == 0 ~ "tram", + route_type == 1 ~ "metro", + route_type == 2 ~ "train", + route_type == 3 ~ "bus", + route_type == 4 ~ "ferry", + route_type == 5 ~ "cable tram", + route_type == 6 ~ "cable car", + route_type == 7 ~ "funicular", + route_type == 11 ~ "trolleybus", + route_type == 12 ~ "monorail" + )) %>% + dplyr::select(stop_id, pt_stop_type) + + stops.found = TRUE + + } + + if(stops.found) { + stops.with.types <- stops %>% + left_join(stops.routetypes.coded, by = "stop_id") + + return(stops.with.types) + + } else { + return(c()) # empty vector if no stops can be returned + } + + + +} \ No newline at end of file From 0f95a14d0f060b7803c09b9c0b8df624d6cebf9d Mon Sep 17 00:00:00 2001 From: StevePem Date: Tue, 9 Jan 2024 15:51:27 +1100 Subject: [PATCH 041/103] override cycleway defaults where footpaths tagged as cyclable --- functions/processOsmTags.R | 1 + functions/restructureData.R | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/functions/processOsmTags.R b/functions/processOsmTags.R index c88ae5c..7ae99d2 100644 --- a/functions/processOsmTags.R +++ b/functions/processOsmTags.R @@ -81,6 +81,7 @@ processOsmTags <- function(osm_df,this_defaults_df){ if(any(cycleway_tags=="track")& df$highway[1]!="cycleway") df$cycleway[1]=3 if(any(foot_tags=="no")& df$highway[1]=="cycleway") df$cycleway[1]=5 if(any(car_tags=="no")) df$is_car[1]=0 + if(df$is_car[1]==0 & any(bicycle_tags %in% c("yes", "designated")) & df$cycleway[1]<5) df$cycleway[1]=4 if(any(foot_tags=="no")) df$is_walk[1]=0 if(any(foot_tags %in% c("yes","designated"))) df$is_walk[1]=1 if(df$cycleway[1]>0 | any(bicycle_tags %in% c("yes","designated"))) df$is_cycle[1]=1 diff --git a/functions/restructureData.R b/functions/restructureData.R index 26ec96e..098c85d 100644 --- a/functions/restructureData.R +++ b/functions/restructureData.R @@ -10,7 +10,7 @@ restructureData <- function(networkList, highway_lookup, # finding merged bikepath ids bikepath_uids <- links %>% st_drop_geometry() %>% - filter(cycleway=="4" & highway_order<15) %>% + filter((cycleway=="4" | cycleway=="5") & highway_order<15) %>% dplyr::select(uid) %>% unlist() %>% as.double() # changing merged bikepaths to regular bikepaths bikepaths <- links %>% From 93696a35c1f8bd0c48e8e96d28d222341c6f0626 Mon Sep 17 00:00:00 2001 From: StevePem Date: Thu, 11 Jan 2024 08:12:45 +1100 Subject: [PATCH 042/103] correct parameters --- functions/getPTStops.R | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/functions/getPTStops.R b/functions/getPTStops.R index 96eb1b6..e6d4a09 100644 --- a/functions/getPTStops.R +++ b/functions/getPTStops.R @@ -2,7 +2,7 @@ # requires tidytransit (loaded in NetworkGenerator.R) -getPTStops <- function(city, gtfs_feed, outputCrs, nodes_current) { +getPTStops <- function(city, gtfs_feed, outputCrs, edges_current) { # city = "Melbourne" # gtfs_feed = "./data/gtfs.zip" # outputCrs = 28355 From 6a2352cc1dff0703c06d91967d9d63714a8b4e0c Mon Sep 17 00:00:00 2001 From: StevePem Date: Thu, 11 Jan 2024 13:43:12 +1100 Subject: [PATCH 043/103] download osm extract --- NetworkGenerator.R | 66 ++++++++++++++++++++----------------- functions/addDestinations.R | 3 ++ functions/getOsmExtract.R | 51 ++++++++++++++++++++++++++++ 3 files changed, 90 insertions(+), 30 deletions(-) create mode 100644 functions/getOsmExtract.R diff --git a/NetworkGenerator.R b/NetworkGenerator.R index 
684ea1b..72b6a1c 100644 --- a/NetworkGenerator.R +++ b/NetworkGenerator.R @@ -4,9 +4,8 @@ makeNetwork<-function(outputFileName="test"){ # CITY AND ITS PARAMETERS # Set city + # city = "Bendigo" city = "Melbourne" - # city = "Brisbane" - # city = "Munich" # City parameters to be set # • outputCrs: desired coordinate system for network @@ -25,37 +24,37 @@ makeNetwork<-function(outputFileName="test"){ # • gtfs_feed: if 'addGtfs=T' or 'addDestinationLayer=T, zip file containing # GTFS data (and, if 'addGtfs=T', also set start and end dates in GTFS section) - if (city == "Melbourne") { - outputCrs = 28355 - osmExtract = "./data/melbourne.osm" - networkSqlite = "./data/melbourne_network_unconfigured.sqlite" - cropAreaPoly = "city-of-melbourne_victoria" - demFile = "./data/DEM_melbourne.tif" - osmPbfExtract = "./data/melbourne_australia.osm.pbf" - ndviFile = "./data/NDVI_1600mBuffer_Melbourne_reprojected.tif" - gtfs_feed = "./data/gtfs.zip" + if (city == "Bendigo") { + region = "../data/processed/greater_bendigo.sqlite" + outputCrs = 7899 + osmGpkg = "../data/processed/bendigo_osm.gpkg" + # networkSqlite = "./data/brisbane_network_unconfigured.sqlite" + # cropAreaPoly = "" # must set 'crop2TestArea=F' + # demFile = "./data/5m_DEM_reprojected.tif" # MIGHT NOT BE FINAL FILE + # osmPbfExtract = "./data/brisbane_australia.osm.pbf" + # ndviFile = "" # must set 'addNDVI=F' + # gtfs_feed = "./data/SEQ_GTFS.zip" + + } else if (city == "Melbourne") { + region = "../data/processed/greater_melbourne.sqlite" + outputCrs = 7899 + osmGpkg = "../data/processed/melbourne_osm.gpkg" + # networkSqlite = "./data/melbourne_network_unconfigured.sqlite" + # cropAreaPoly = "city-of-melbourne_victoria" + # demFile = "./data/DEM_melbourne.tif" + # osmPbfExtract = "./data/melbourne_australia.osm.pbf" + # ndviFile = "./data/NDVI_1600mBuffer_Melbourne_reprojected.tif" + # gtfs_feed = "./data/gtfs.zip" - } else if (city == "Brisbane") { - outputCrs = 28356 - osmExtract = "" # must set 'processOsm=F' - networkSqlite = "./data/brisbane_network_unconfigured.sqlite" - cropAreaPoly = "" # must set 'crop2TestArea=F' - demFile = "./data/5m_DEM_reprojected.tif" # MIGHT NOT BE FINAL FILE - osmPbfExtract = "./data/brisbane_australia.osm.pbf" - ndviFile = "" # must set 'addNDVI=F' - gtfs_feed = "./data/SEQ_GTFS.zip" - - } else if (city == "Munich") { - outputCrs = 25832 - osmExtract = "" # must set 'processOsm=F' - networkSqlite = "./data/munich_network_unconfigured.sqlite" - cropAreaPoly = "" # must set 'crop2TestArea=F' - demFile = "" # must set 'addElevation=F' - osmPbfExtract = "./data/munich_germany.osm.pbf" - ndviFile = "" # must set 'addNDVI=F' - gtfs_feed = "./data/mvv_gtfs.zip" # to test; if not then >> # must set 'addGtfs=F' and 'addDestinationLayer=F' } + # DOWNLOAD OSM EXTRACT + # A flag for whether to download osm extract for the region (if not, and if + # network needs to be processed, then must have region gpkg + downloadOsm=T + # Distance to buffer region when getting osm extract + regionBufferDist=10000 + # INPUT NETWORK # A flag for whether process raw osm extract or not (if not, must have network sqlite) processOsm=F @@ -138,6 +137,7 @@ makeNetwork<-function(outputFileName="test"){ echo("========================================================\n") echo(" **Network Generation Setting** \n") echo("--------------------------------------------------------\n") + echo(paste0("- Downloading OSM extract: ", downloadOsm,"\n")) echo(paste0("- Starting from OSM extract: ", processOsm,"\n")) echo(paste0("- Cropping to a test 
area: ", crop2Area,"\n")) echo(paste0("- Shortest link length in network simplification: ", shortLinkLength,"\n")) @@ -152,6 +152,12 @@ makeNetwork<-function(outputFileName="test"){ echo(" **Launching Network Generation** \n") echo("--------------------------------------------------------\n") + # Downloading OSM + if (downloadOsm) { + echo(paste0("Downloading OSM extract for ", city, "\n")) + getOsmExtract(region, outputCrs, regionBufferDist, osmGpkg) + } + # Processing OSM if(processOsm){ echo(paste0("Starting to process osm extract file, ", osmExtract,"\n")) diff --git a/functions/addDestinations.R b/functions/addDestinations.R index 522c239..2b376a2 100644 --- a/functions/addDestinations.R +++ b/functions/addDestinations.R @@ -6,6 +6,9 @@ # uses functions for various destination types with tag combinations set out # in 'getDestinationTypes.R' +# NOTE - WILL REQUIRE REFACTORING IF THE .GPKG, RATHER THAN THE .OSM.PBF, IS +# SAVED AS THE BASE FILE - NEED TO EXTRACT THE OTHER TAGS FROM THE .GPKG + addDestinations <- function(nodes_current, edges_current, osmPbfExtract, diff --git a/functions/getOsmExtract.R b/functions/getOsmExtract.R new file mode 100644 index 0000000..8356c98 --- /dev/null +++ b/functions/getOsmExtract.R @@ -0,0 +1,51 @@ +# function to retrieve OSM extract for given region + +getOsmExtract <- function(region, + outputCrs, + regionBufferDist = 10000, + osmGpkg) { + + # region = "../data/processed/greater_bendigo.sqlite" + # outputCrs = 7899 + # regionBufferDist = 10000 # 10km + # osmGpkg = "../data/processed/bendigo_osm.gpkg" + + # load region and buffer by selected distance (eg 10km) + region <- st_read(region) + region.buffer <- st_buffer(region, regionBufferDist) + + # increase timeout to allow time for large Australia extract to download + default.timeout <- getOption("timeout") + options(timeout = 1200) + + # download the full extract (whole of Australia; quite slow) + echo("Downloading OSM extract\n") + full.extract <- oe_download(oe_match(region, crs = outputCrs)$url, + download_directory = ".") + + # convert to gpkg, including all layers + echo(paste("Converting downloaded OSM extract to .gpkg for selected region\n")) + region.gpkg <- + oe_vectortranslate(full.extract, + layer = st_layers(full.extract)$name, + vectortranslate_options = c("-t_srs", + paste0("EPSG:", outputCrs)), + boundary = region.buffer) + + # save to permanent location + for (i in 1:length(st_layers(region.gpkg)$name)) { + current.layer.name <- st_layers(region.gpkg)$name[i] + st_write(st_read(region.gpkg, layer = current.layer.name), + osmGpkg, + layer = current.layer.name, + delete_layer = TRUE) + } + + # delete full extract and temporary location of region extract, restore timeout to default + unlink(full.extract) + unlink(region.gpkg) + options(timeout = default.timeout) + +} + + From c124aaee81ab1c19bb717533c774e2e6f4ec5f8b Mon Sep 17 00:00:00 2001 From: StevePem Date: Mon, 15 Jan 2024 10:28:33 +1100 Subject: [PATCH 044/103] processing OSM network, in progress --- NetworkGenerator.R | 98 ++++++----- functions/getOsmExtract.R | 10 +- functions/processOsm.R | 307 +++++++++++++++++++++++++++++++++ functions/splitPathsAtPoints.R | 149 ++++++++++++++++ 4 files changed, 517 insertions(+), 47 deletions(-) create mode 100644 functions/processOsm.R create mode 100644 functions/splitPathsAtPoints.R diff --git a/NetworkGenerator.R b/NetworkGenerator.R index 72b6a1c..2a52e95 100644 --- a/NetworkGenerator.R +++ b/NetworkGenerator.R @@ -4,8 +4,8 @@ makeNetwork<-function(outputFileName="test"){ # CITY AND 
ITS PARAMETERS # Set city - # city = "Bendigo" - city = "Melbourne" + city = "Bendigo" + # city = "Melbourne" # City parameters to be set # • outputCrs: desired coordinate system for network @@ -50,14 +50,16 @@ makeNetwork<-function(outputFileName="test"){ # DOWNLOAD OSM EXTRACT # A flag for whether to download osm extract for the region (if not, and if - # network needs to be processed, then must have region gpkg - downloadOsm=T - # Distance to buffer region when getting osm extract - regionBufferDist=10000 + # network needs to be processed, then must already have osmGpkg file) + downloadOsm=F + regionBufferDist=10000 # Distance to buffer region when getting osm extract + retainDownload=F # Whether to retain downloaded file after region extracted + - # INPUT NETWORK - # A flag for whether process raw osm extract or not (if not, must have network sqlite) - processOsm=F + # NETWORK FROM OSM + # A flag for whether to build unconfigured network from osm extract (if not, + # must already have network sqlite) + networkFromOsm=T # SIMPLIFICATION shortLinkLength=20 @@ -104,30 +106,35 @@ makeNetwork<-function(outputFileName="test"){ # Packages ---------------------------------------------------------------- - library(sf) - library(fs) - library(dplyr) - library(data.table) - library(stringr) - library(igraph) - library(raster) - library(terra) - library(rgdal) - library(purrr) - library(lwgeom) - library(tidytransit) - library(hablar) - library(hms) - library(osmextract) - library(tidyr) + library(sf) + library(fs) + library(dplyr) + library(data.table) + library(stringr) + library(igraph) + library(raster) + library(terra) + library(rgdal) + library(purrr) + library(lwgeom) + library(tidytransit) + library(hablar) + library(hms) + library(osmextract) + library(tidyr) + library(doSNOW) + library(parallel) + library(foreach) + library(nngeo) + # Building the output folder structure ------------------------------------ - - outputDir <- paste0("output/",outputFileName) - if(dir.exists(outputDir)) dir_delete(outputDir) - dir_create(paste0('./',outputDir)) - sink(paste0('./',outputDir,'/makeMatsimNetwork.log'), append=FALSE, split=TRUE) - if (addGtfs) dir_create(paste0(outputDir,"/gtfs")) + ## COMMENTING THIS OUT FOR NOW BECAUSE IT'S ANNOYING; ADD BACK LATER + # outputDir <- paste0("output/",outputFileName) + # if(dir.exists(outputDir)) dir_delete(outputDir) + # dir_create(paste0('./',outputDir)) + # sink(paste0('./',outputDir,'/makeMatsimNetwork.log'), append=FALSE, split=TRUE) + # if (addGtfs) dir_create(paste0(outputDir,"/gtfs")) # Functions -------------------------------------------------------------- @@ -138,7 +145,7 @@ makeNetwork<-function(outputFileName="test"){ echo(" **Network Generation Setting** \n") echo("--------------------------------------------------------\n") echo(paste0("- Downloading OSM extract: ", downloadOsm,"\n")) - echo(paste0("- Starting from OSM extract: ", processOsm,"\n")) + echo(paste0("- Processing the OSM extract: ", networkFromOsm,"\n")) echo(paste0("- Cropping to a test area: ", crop2Area,"\n")) echo(paste0("- Shortest link length in network simplification: ", shortLinkLength,"\n")) echo(paste0("- Adding elevation: ", addElevation,"\n")) @@ -159,19 +166,22 @@ makeNetwork<-function(outputFileName="test"){ } # Processing OSM - if(processOsm){ - echo(paste0("Starting to process osm extract file, ", osmExtract,"\n")) - echo(paste0("This might take a while depending on your OSM extract size, ", osmExtract,"\n")) - echo(paste0("Output coordinate system: ", outputCrs, 
"\n")) - echo(paste0("Note that this step requires Postgres and GDAL/OGR to be installed, see readme for more info.\n")) - networkSqlite="./data/network.sqlite" - if(file_exists(osmExtract)){ - system(paste("./processOSM.sh ", osmExtract, outputCrs, networkSqlite)) - }else{ - warning("OSM extract not found, skipping this step") - } + if(networkFromOsm) { + echo(paste0("Starting to process osm extract file, ", osmGpkg, "\n")) + # networkSqlite="./data/network.sqlite" + # if(file_exists(osmExtract)){ + # system(paste("./processOSM.sh ", osmExtract, outputCrs, networkSqlite)) + # }else{ + # warning("OSM extract not found, skipping this step") + # } + # FINALISE THIS CODE BLOCK DEPENDING ON FINAL STRUCTURE OF processOsm.R + # AND ARRANGMEENTS FOR SAVING + networkSqlite <- processOsm(osmGpkg, outputCrs) + } + return(networkSqlite) ## JUST FOR TESTING - DELETE! + # Note: writing logical fields to sqlite is a bad idea, so switching to integers networkInput <- list(st_read(networkSqlite,layer="nodes",quiet=T), st_read(networkSqlite,layer="edges",quiet=T)) @@ -371,3 +381,5 @@ makeNetwork<-function(outputFileName="test"){ if(writeXml) system.time(exportXML(networkFinal, outputDir)) } +## JUST FOR TESTING +# output <- makeNetwork() \ No newline at end of file diff --git a/functions/getOsmExtract.R b/functions/getOsmExtract.R index 8356c98..b22c1e4 100644 --- a/functions/getOsmExtract.R +++ b/functions/getOsmExtract.R @@ -19,9 +19,9 @@ getOsmExtract <- function(region, options(timeout = 1200) # download the full extract (whole of Australia; quite slow) - echo("Downloading OSM extract\n") - full.extract <- oe_download(oe_match(region, crs = outputCrs)$url, - download_directory = ".") + download.url <- oe_match(region, crs = outputCrs)$url + echo(paste("Downloading OSM extract from", download.url, "\n")) + full.extract <- oe_download(download.url, download_directory = ".") # convert to gpkg, including all layers echo(paste("Converting downloaded OSM extract to .gpkg for selected region\n")) @@ -42,7 +42,9 @@ getOsmExtract <- function(region, } # delete full extract and temporary location of region extract, restore timeout to default - unlink(full.extract) + if (!retainDownload) { + unlink(full.extract) + } unlink(region.gpkg) options(timeout = default.timeout) diff --git a/functions/processOsm.R b/functions/processOsm.R new file mode 100644 index 0000000..ff41a1d --- /dev/null +++ b/functions/processOsm.R @@ -0,0 +1,307 @@ +# function to convert OSM .gpkg file into network of nodes and edges + + +library(sf) +library(fs) +library(dplyr) +library(lwgeom) +library(nngeo) +library(stringr) +library(doSNOW) +library(parallel) +library(foreach) +library(ggplot2) +source("./functions/etc/logging.R") +source("./functions/splitPathsAtPoints.R") +outputCrs = 7899 + +processOsm <- function(osmGpkg, outputCrs) { + + osmGpkg = "../data/processed/bendigo_osm.gpkg" + # osmGpkg = "../data/processed/melbourne_osm.gpkg" + + # read in OSM data + # -----------------------------------# + + # read in osmGpkg lines and points + osm.lines <- st_read(osmGpkg, layer = "lines") %>% + st_set_geometry("geom") + osm.points <- st_read(osmGpkg, layer = "points") %>% + st_set_geometry("geom") + + + # paths and intersections + # -----------------------------------# + + # extract the roads and other paths + echo("Extracting paths from OSM data\n") + paths <- osm.lines %>% + # filter to highways + filter(!is.na(highway)) %>% + # exclude specific highway types + filter(!highway %in% c("bridleway", "bus_stop", "co", "platform", + 
"raceway", "services", "traffic_island")) %>% + # exclude non-current highways + filter(!str_detect(highway, "construction|proposed|disused|abandoned")) %>% + # exclude other tags indicating not usable or current + filter(is.na(other_tags) | + !str_detect(other_tags, '"access"=>"private"|"abandoned"=>"yes"')) %>% + # snap to grid and retain relevant fields + st_snap_to_grid(1) %>% + dplyr::select(osm_id, highway, other_tags) %>% + # add bridge/tunnel column (0 for neither, 1 for bridge, 2 for tunnel) + mutate(bridge_tunnel = case_when(str_detect(other_tags, "bridge") ~ 1, + str_detect(other_tags, "tunnel") ~ 2, + TRUE ~ 0)) + + # temp dev notes, comparing to processOSM.sh (SP) + # (1) I did not remove other_tags LIKE 'busbar'; these are electrical facilities, + # but they are all highway=NA, so are removed anyway + # (2) I did not remove all other_tags LIKE 'abandoned'; this is too broad, and + # removes cycleways that are "railway"=>"abandoned" or similar; instead, + # narrowed to "abandoned"=>"yes" + # (3) separate codes for bridges and tunnels, rather than 'TRUE' for both + # (4) some paths have a 'level' or 'layer' tag that could be used to separate + # out different levels; however these are mostly within shopping centres + # or multi-storey carparks, and probably aren't of much interest to us + + + # find intersections, but excluding any on different levels as + # determined by bridge_tunnel + + # intersection points - intersect paths with a copy of itself, select points, + # keep where bridge/tunnel matches + echo("Finding path intersections\n") + system.time( + intersections <- paths %>% + # intersect with itself - produces a separate point for each pair of links + # that intersect at an intersection (and also produces line intersections) + st_intersection(., paths %>% dplyr::select(bridge_tunnel_a = bridge_tunnel)) %>% + # keep just the points + st_collection_extract("POINT") %>% + # only keep where bridge_tunnel match (at grade, both bridge or both tunnel) + filter(bridge_tunnel == bridge_tunnel_a) %>% + # combine where same location with same osm_id + group_by(osm_id, geom) %>% + summarise() %>% + ungroup() + ) + + + # temp dev notes (SP): + # (1) compared to network.sql, this only places intersections where both are + # bridges, both are tunnels, or both are neither (whereas network.sql, + # could make intersection where one is bridge and one is tunnel) + # (2) this will not succeed in excluding intersections for multi-level bridges + # over the top of each other (but in the absence of Z data, I don't have + # any good ideas for doing that) + + + # split paths at intersections + # -----------------------------------# + + # split paths at intersections with matching osm_ids + echo(paste("Splitting", nrow(paths), "paths at intersections\n")) + split.path.list <- splitPathsAtPoints(paths, intersections, 0.001, "osm_id") + + # convert to dataframe, snap to grid, remove empty geometries, add unique id + echo("Combining the split paths into a single dataframe") + system.time( + split.paths <- bind_rows(split.path.list) %>% + st_snap_to_grid(1) %>% + # remove empty geometries + filter(!st_is_empty(geom)) %>% + # add unique id + mutate(path_id = row_number()) + ) + + # find endpoints of each split path + echo("Finding endpoints of the split paths") + endpoints <- rbind(lwgeom::st_startpoint(split.paths) %>% st_sf(), + lwgeom::st_endpoint(split.paths) %>% st_sf()) %>% + # remove duplicates (produces multipoint) + summarise() %>% + # convert multipoint to point + st_cast("POINT") %>% + # 
add unique id
+    mutate(endpoint_id = row_number())
+
+  # find split.paths that have more than 2 endpoints within 0.1m, in order
+  # to re-split at adjacent endpoints
+  echo("Finding paths that need to be re-split")
+  paths.with.nearby.endpoints <- split.paths %>%
+    # join endpoints within 0.1m
+    st_join(endpoints %>% st_buffer(0.1), join = st_intersects)
+
+  multiple.endpoint.paths <- paths.with.nearby.endpoints %>%
+    # count endpoints
+    st_drop_geometry() %>%
+    group_by(path_id) %>%
+    summarise(n_endpoints = n()) %>%
+    ungroup() %>%
+    # filter to those with more than 2
+    filter(n_endpoints > 2)
+
+  # get the paths with more than 2 endpoints - these need to be resplit
+  paths.to.resplit <- split.paths %>%
+    filter(path_id %in% multiple.endpoint.paths$path_id)
+
+
+  # do a second round of splitting: re-split the paths that have adjacent endpoints,
+  # using 0.1 distance this time, but only where adjacent endpoint is an endpoint
+  # for a path that has the same bridge_tunnel status as the path to be resplit
+  echo(paste("Re-splitting", nrow(paths.to.resplit), "paths at adjacent endpoints"))
+
+  endpoints.for.resplit <- paths.with.nearby.endpoints %>%
+    # just keep paths that need to be resplit, with their bridge_tunnel status
+    filter(path_id %in% paths.to.resplit$path_id) %>%
+    st_drop_geometry() %>%
+    rename(path_bridge_tunnel = bridge_tunnel) %>%
+    # join the endpoint geometries
+    left_join(endpoints, by = "endpoint_id") %>%
+    st_sf() %>%
+    # join the bridge_tunnel status of each path that intersects the endpoint
+    # (this is the endpoint's bridge_tunnel status, but it could have more than one,
+    # say where a path enters a tunnel)
+    st_join(paths %>% dplyr::select(endpoint_bridge_tunnel = bridge_tunnel),
+            join = st_intersects) %>%
+    # only keep the endpoints if bridge_tunnel status for the endpoint
+    # matches the bridge_tunnel status of the path to be resplit
+    filter(path_bridge_tunnel == endpoint_bridge_tunnel) %>%
+    distinct()
+
+  resplit.path.list <-
+    splitPathsAtPoints(paths.to.resplit, endpoints.for.resplit, 0.1, "path_id")
+
+  # convert to dataframe, snap to grid, remove empty geometries
+  echo("Combining the resplit paths into a single dataframe")
+  system.time(
+    resplit.paths <- bind_rows(resplit.path.list) %>%
+      st_snap_to_grid(1) %>%
+      filter(!st_is_empty(geom))
+  )
+
+  # remove paths that needed to be resplit, and replace with resplit paths
+  combined.paths <- split.paths %>%
+    filter(!path_id %in% paths.to.resplit$path_id) %>%
+    rbind(resplit.paths) %>%
+    dplyr::select(osm_id) %>%
+    # add a new id field, for joining to from and to id's
+    mutate(combined_path_id = row_number())
+
+  # temp dev notes (SP):
+  # (1) compared to network.sql, the second round only resplits at adjacent
+  #     endpoints if those adjacent endpoints are on paths with the same
+  #     bridge_tunnel status as the path to be resplit
+  # (2) however, resplitting at adjacent endpoints can result in multilevel
+  #     intersections, for example in shopping centres or multi-storey carparks,
+  #     which could be avoided (to some extent) by also matching on the 'layer'
+  #     or 'level' tag status
+
+
+  # finalise paths
+  # -----------------------------------#
+
+  # find from and to id's from endpoints
+  from_ids <- combined.paths %>%
+    dplyr::select(combined_path_id) %>%
+    # take startpoint geometry, and do spatial join to endpoints
+    st_set_geometry(lwgeom::st_startpoint(.)) %>%
+    st_join(endpoints %>% rename(from_id = endpoint_id),
+            join = st_intersects) %>%
+    st_drop_geometry()
+
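+  # (note on the pattern used for from_ids above and to_ids below: st_set_geometry()
+  # temporarily swaps each path's geometry for its start or end point, st_join()
+  # matches that point to the endpoint nodes, and st_drop_geometry() then leaves a
+  # plain combined_path_id -> node id lookup table)
+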
+  to_ids <- combined.paths %>%
+    dplyr::select(combined_path_id) %>%
+    # take endpoint geometry, and do spatial join to endpoints
+    st_set_geometry(lwgeom::st_endpoint(.)) %>%
+    st_join(endpoints %>% rename(to_id = endpoint_id),
+            join = st_intersects) %>%
+    st_drop_geometry()
+
+  # assemble final paths with length, from_id and to_id
+  final.paths <- combined.paths %>%
+    # add length column
+    mutate(length = as.integer(st_length(geom))) %>%
+
+    # join from_id and to_id
+    left_join(from_ids, by = "combined_path_id") %>%
+    left_join(to_ids, by = "combined_path_id") %>%
+
+    # select final fields
+    dplyr::select(osm_id, length, from_id, to_id)
+
+
+
+
+
+  # nodes.....
+  # -----------------------------------#
+
+
+  # extract the traffic signals [MOVE THIS DOWN TO THE BIT DEALING WITH INTERSECTIONS]
+  traffic.signals <- osm.points %>%
+    # filter to traffic signals
+    filter(str_detect(highway, "traffic_signals")) %>%
+    # snap to grid and retain relevant fields
+    st_snap_to_grid(1) %>%
+    dplyr::select(osm_id, highway, other_tags)
+
+
+
+
+
+
+  #------ working section
+
+  # # read in temporary tables
+  # sql.tables <- "./SP_working/temp_melb_sql_tables.sqlite"
+  # roads <- st_read(sql.tables, layer = "roads")
+  #
+  # write out temporary Bendigo outputs
+  bend.out <- "./SP_working/temp_bendigo.sqlite"
+  st_write(paths, bend.out, layer = "paths", delete_layer = T)
+  st_write(intersections, bend.out, layer = "intersections", delete_layer = T)
+  st_write(split.paths, bend.out, layer = "split_paths", delete_layer = T)
+  st_write(endpoints, bend.out, layer = "endpoints", delete_layer = T)
+  st_write(final.paths, bend.out, layer = "final_paths", delete_layer = T)
+
+  st_write(split.paths, bend.out, layer = "split_paths_diff_loop", delete_layer = T)
+  st_write(paths.to.resplit, bend.out, layer = "paths_to_resplit", delete_layer = T)
+
+  st_delete(bend.out, layer = "split_paths_unbuffered_point_diff")
+
+  paths <- st_read(bend.out, layer = "paths")
+  intersections <- st_read(bend.out, layer = "intersections")
+  split.paths <- st_read(bend.out, layer = "split_paths")
+  endpoints <- st_read(bend.out, layer = "endpoints")
+  final.paths <- st_read(bend.out, layer = "final_paths")
+
+  # write out / read in temporary Melbourne outputs
+  melb.out <- "./SP_working/temp_melbourne.sqlite"
+  st_write(paths, melb.out, layer = "paths", delete_layer = T)
+  st_write(intersections, melb.out, layer = "intersections", delete_layer = T)
+  st_write(split.paths, melb.out, layer = "split_paths", delete_layer = T)
+  st_write(endpoints, melb.out, layer = "endpoints", delete_layer = T)
+  st_write(final.paths, melb.out, layer = "final_paths", delete_layer = T)
+
+  st_write(paths.to.resplit, melb.out, layer = "paths_to_resplit", delete_layer = T)
+  st_write(problem.paths, melb.out, layer = "problem_paths", delete_layer = T)
+
+  st_delete(melb.out, layer = "problem_paths")
+
+  paths <- st_read(melb.out, layer = "paths")
+  intersections <- st_read(melb.out, layer = "intersections")
+  split.paths <- st_read(melb.out, layer = "split_paths")
+  endpoints <- st_read(melb.out, layer = "endpoints")
+  final.paths <- st_read(melb.out, layer = "final_paths")
+
+  #--------end working section
+
+  # temporary return statement for testing
+  return(list(paths, intersections, endpoints, final.paths))
+
+
+
+}
+
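The intersect-split-endpoint pipeline that processOsm.R builds up above is easier to follow on a toy input. The sketch below is an editorial illustration rather than part of the patch: it assumes only that sf, dplyr and lwgeom are installed, and every object name in it is made up for the example. It crosses two paths, finds the crossing by self-intersection, splits both paths there, and then derives from-node ids with the same st_set_geometry/st_join pattern used in the patch.

library(sf)
library(dplyr)
library(lwgeom)

# two toy paths crossing at (1, 1)
paths <- st_sf(osm_id = c("a", "b"),
               geom = st_sfc(st_linestring(rbind(c(0, 0), c(2, 2))),
                             st_linestring(rbind(c(0, 2), c(2, 0)))))

# self-intersect: each pair of crossing paths yields a point; drop self-pairs
crossings <- st_intersection(paths, paths %>% rename(osm_id_b = osm_id)) %>%
  filter(osm_id != osm_id_b) %>%
  st_geometry() %>%
  st_union() %>%
  st_cast("POINT")

# split each path at the (slightly buffered) crossing, as splitPathsAtPoints does;
# the buffer nudges the cut ends apart, which the full pipeline handles with
# st_snap_to_grid(1) and the re-split round
split.paths <- st_difference(paths, st_buffer(crossings, 0.001)) %>%
  st_cast("LINESTRING") %>%
  mutate(path_id = row_number())

# unique start/end points become candidate nodes
endpoints <- st_sf(geom = c(st_startpoint(split.paths), st_endpoint(split.paths))) %>%
  distinct() %>%
  mutate(endpoint_id = row_number())

# from-id lookup: swap each segment's geometry for its start point, join to the
# nodes, then drop the geometry; a to_ids lookup works the same way with st_endpoint()
from_ids <- split.paths %>%
  st_set_geometry(st_startpoint(.)) %>%
  st_join(endpoints, join = st_intersects) %>%
  st_drop_geometry()

In processOsm.R the same idea runs at scale, with the bridge/tunnel matching, grid snapping and grouped parallel splitting layered on top.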
diff --git a/functions/splitPathsAtPoints.R b/functions/splitPathsAtPoints.R
new file mode 100644
index 0000000..4247b6e
--- /dev/null
+++ b/functions/splitPathsAtPoints.R
@@ -0,0 +1,149 @@
+# function to split paths at points within a given distance of the path,
+# using matching field (such as osm_id) in both paths and points
+
+splitPathsAtPoints <- function(paths, points, buff.dist, field) {
+
+  # paths = paths
+  # points = intersections
+  # buff.dist = 0.001 # distance to which points are buffered to split
+  # field = "osm_id"
+
+  # paths = paths.to.resplit
+  # points = endpoints.for.resplit
+  # buff.dist = 0.1
+  # field = "path_id"
+
+  # run function once if no more than 100k paths, otherwise break into groups
+  if (nrow(paths) <= 100000) {
+
+    # buffer points to buffer distance
+    echo("Buffering points to split paths\n")
+    buffered.points <- st_buffer(points, buff.dist)
+
+    # run the loop to split paths at points
+    split.path.list <- splitPathsAtPointsParallelLoop(paths,
+                                                      buffered.points,
+                                                      field)
+
+  } else {
+
+    # break larger path sets into groups for speed and memory
+
+    group.nos <- seq(1:ceiling(nrow(paths) / 100000)) # groups of up to 100000
+    groups <- c()
+
+    for (i in group.nos) {
+
+      echo(paste("Splitting group", i, "of", max(group.nos), "groups of paths\n"))
+
+      # group of paths
+      start.no <- ((group.nos[i] - 1) * 100000) + 1
+      end.no <- min(group.nos[i] * 100000, nrow(paths))
+      group.paths <- paths[start.no:end.no, ]
+
+      # points used in that group
+      group.points <- points %>%
+        filter(osm_id %in% group.paths$osm_id)
+
+      # buffer points to buffer distance
+      echo("Buffering points to split paths\n")
+      group.buffered.points <- st_buffer(group.points, buff.dist)
+
+      # for the group, run the loop to split paths at points
+      group.split.path.list <-
+        splitPathsAtPointsParallelLoop(group.paths,
+                                       group.buffered.points,
+                                       field)
+
+      # temporarily save output
+      group.output.name <- paste0("split_path_list_", i)
+      saveRDS(group.split.path.list, paste0("./", group.output.name, ".rds"))
+      groups <- c(groups, group.output.name)
+
+    }
+
+    # retrieve and assemble temporarily saved outputs
+    split.path.list <- list()
+    for (i in 1:length(groups)) {
+      # read in the RDS and add it to split.path.list
+      rds.path <- paste0("./", groups[i], ".rds")
+      split.path.list <- c(split.path.list, readRDS(rds.path))
+      # delete the RDS
+      unlink(rds.path)
+
+    }
+
+  }
+
+  return(split.path.list)
+
+}
+
+
+splitPathsAtPointsParallelLoop <- function(paths,
+                                           buffered.points,
+                                           field) {
+
+  # setup for parallel processing - detect no of available cores and create cluster
+  cores <- detectCores()
+  cluster <- parallel::makeCluster(cores)
+  doSNOW::registerDoSNOW(cluster)
+
+  # set up progress reporting
+  pb <- txtProgressBar(max = nrow(paths), style = 3)
+  progress <- function(n) setTxtProgressBar(pb, n)
+  opts <- list(progress = progress)
+
+  # report progress
+  echo(paste("Splitting", nrow(paths), "paths; parallel processing with",
+             cores, "cores\n"))
+
+  # loop to split paths
+  split.path.list <-
+    foreach(i = 1:nrow(paths),
+            # foreach(i = 1:10,
+            .packages = c("dplyr", "sf"),
+            .options.snow = opts) %dopar% {
+
+              path <- paths[i,]
+
+              # intersections with the same variable field name (say, 'osm_id') as
+              # path: '.data[[field]]' is the variable field in the current data,
+              # which is 'buffered points'; 'path[[field]]' is the same field in 'path'
+
+              path.intersections <- buffered.points %>%
+                filter(.data[[field]] == path[[field]])
+
+
+              # split paths at intersections
+              if(nrow(path.intersections) > 0) {
+
+                path.list <- path %>% st_geometry()
+
+                path.intersections.list <- path.intersections %>%
+                  # convert to multipoint
+                  summarise() %>%
+                  st_geometry()
+
+                path.list.segmented <- st_difference(path.list,
+                                                     path.intersections.list)
+                split.path <- path %>%
+                  
st_set_geometry(st_sfc(path.list.segmented)) %>% + st_sf() %>% + st_cast(to = "LINESTRING") + + } else { + split.path <- path + } + + return(split.path) + + } + + # close the progress bar and cluster + close(pb) + stopCluster(cluster) + + return(split.path.list) + +} From 1f78b622dcb763886a8d7b5235866783369ee765 Mon Sep 17 00:00:00 2001 From: StevePem Date: Thu, 18 Jan 2024 09:10:43 +1100 Subject: [PATCH 045/103] process OSM network --- NetworkGenerator.R | 107 ++++++++++++++++--------------- functions/processOsm.R | 141 +++++++++++++++++------------------------ 2 files changed, 110 insertions(+), 138 deletions(-) diff --git a/NetworkGenerator.R b/NetworkGenerator.R index 2a52e95..fe7f9db 100644 --- a/NetworkGenerator.R +++ b/NetworkGenerator.R @@ -1,17 +1,21 @@ -makeNetwork<-function(outputFileName="test"){ - # outputFileName="network" +makeNetwork<-function(city, outputFileName = "test"){ + # city = "Bendigo" + # city = "Melbourne" + + # outputFileName = "network" + # Parameters -------------------------------------------------------------- - # CITY AND ITS PARAMETERS - # Set city - city = "Bendigo" - # city = "Melbourne" - + # CITY PARAMETERS # City parameters to be set + # • region: if 'downloadOsm=T', file delineating the boundary of the area for + # which Osm extract is to be downloaded (assumed to be in sqlite format + # with a single layer) # • outputCrs: desired coordinate system for network - # • osmExtract: if 'processOsm=T', OSM extract file in .osm format (.osm.pbf - # not supported for this step) - # • networkSqlite: if 'processOsm=F', network sqlite file + # • osmGpkg: location where downloaded OSM extract for region is to be stored + # (if 'downloadOsm=T') and/or read from (if 'processOsm=T') + # • unconfiguredSqlite: location where processed OSM file is to be stored + # (if 'processOsm=T') or read from (if 'processOsm=F') # • cropAreaPoly: if 'crop2TestArea=T' cropArea location from # https://github.com/JamesChevalier/cities/tree/master/australia/victoria # (only supported for Victoria at this stage) @@ -28,8 +32,8 @@ makeNetwork<-function(outputFileName="test"){ region = "../data/processed/greater_bendigo.sqlite" outputCrs = 7899 osmGpkg = "../data/processed/bendigo_osm.gpkg" - # networkSqlite = "./data/brisbane_network_unconfigured.sqlite" - # cropAreaPoly = "" # must set 'crop2TestArea=F' + unconfiguredSqlite = "../data/processed/bendigo_network_unconfigured.sqlite" + # cropAreaPoly = "" # must set 'crop2Area=F' # demFile = "./data/5m_DEM_reprojected.tif" # MIGHT NOT BE FINAL FILE # osmPbfExtract = "./data/brisbane_australia.osm.pbf" # ndviFile = "" # must set 'addNDVI=F' @@ -39,13 +43,16 @@ makeNetwork<-function(outputFileName="test"){ region = "../data/processed/greater_melbourne.sqlite" outputCrs = 7899 osmGpkg = "../data/processed/melbourne_osm.gpkg" - # networkSqlite = "./data/melbourne_network_unconfigured.sqlite" + unconfiguredSqlite = "../data/processed/melbourne_network_unconfigured.sqlite" # cropAreaPoly = "city-of-melbourne_victoria" # demFile = "./data/DEM_melbourne.tif" # osmPbfExtract = "./data/melbourne_australia.osm.pbf" # ndviFile = "./data/NDVI_1600mBuffer_Melbourne_reprojected.tif" # gtfs_feed = "./data/gtfs.zip" + } else { + echo(paste("City parameters for", city, "have not been set; unable to proceed\n")) + return() } # DOWNLOAD OSM EXTRACT @@ -55,11 +62,11 @@ makeNetwork<-function(outputFileName="test"){ regionBufferDist=10000 # Distance to buffer region when getting osm extract retainDownload=F # Whether to retain downloaded file after region 
extracted - # NETWORK FROM OSM # A flag for whether to build unconfigured network from osm extract (if not, - # must already have network sqlite) + # must already have unconfigured sqlite) networkFromOsm=T + saveUnconfigured=T # SIMPLIFICATION shortLinkLength=20 @@ -109,6 +116,7 @@ makeNetwork<-function(outputFileName="test"){ library(sf) library(fs) library(dplyr) + library(tidyr) library(data.table) library(stringr) library(igraph) @@ -129,12 +137,11 @@ makeNetwork<-function(outputFileName="test"){ # Building the output folder structure ------------------------------------ - ## COMMENTING THIS OUT FOR NOW BECAUSE IT'S ANNOYING; ADD BACK LATER - # outputDir <- paste0("output/",outputFileName) - # if(dir.exists(outputDir)) dir_delete(outputDir) - # dir_create(paste0('./',outputDir)) - # sink(paste0('./',outputDir,'/makeMatsimNetwork.log'), append=FALSE, split=TRUE) - # if (addGtfs) dir_create(paste0(outputDir,"/gtfs")) + outputDir <- paste0("output/",outputFileName) + if(dir.exists(outputDir)) dir_delete(outputDir) + dir_create(paste0('./',outputDir)) + sink(paste0('./',outputDir,'/makeMatsimNetwork.log'), append=FALSE, split=TRUE) + if (addGtfs) dir_create(paste0(outputDir,"/gtfs")) # Functions -------------------------------------------------------------- @@ -165,47 +172,39 @@ makeNetwork<-function(outputFileName="test"){ getOsmExtract(region, outputCrs, regionBufferDist, osmGpkg) } - # Processing OSM + # Processing OSM, or loading existing layers if not required if(networkFromOsm) { echo(paste0("Starting to process osm extract file, ", osmGpkg, "\n")) - # networkSqlite="./data/network.sqlite" - # if(file_exists(osmExtract)){ - # system(paste("./processOSM.sh ", osmExtract, outputCrs, networkSqlite)) - # }else{ - # warning("OSM extract not found, skipping this step") - # } - # FINALISE THIS CODE BLOCK DEPENDING ON FINAL STRUCTURE OF processOsm.R - # AND ARRANGMEENTS FOR SAVING - networkSqlite <- processOsm(osmGpkg, outputCrs) + networkUnconfigured <- processOsm(osmGpkg, outputCrs) + if (saveUnconfigured) { + st_write(networkUnconfigured[[1]], unconfiguredSqlite, layer = "nodes", delete_layer = T) + st_write(networkUnconfigured[[2]], unconfiguredSqlite, layer = "edges", delete_layer = T) + st_write(networkUnconfigured[[3]], unconfiguredSqlite, layer = "osm_metadata", delete_layer = T) + } + + } else { + + if (file_exists(unconfiguredSqlite)) { + echo(paste("Reading in existing unconfigured network,", unconfiguredSqlite, "\n")) + networkUnconfigured <- + list(st_read(unconfiguredSqlite, layer = "nodes") %>% st_set_geometry("geom"), + st_read(unconfiguredSqlite, layer = "edges") %>% st_set_geometry("geom")) + + } else { + echo(paste("Unconfigured network file", unconfiguredSqlite, "not found; unable to proceed\n")) + return() + } } - return(networkSqlite) ## JUST FOR TESTING - DELETE! 
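An aside on the SQLite round trip used here: the point in the note removed just below ("writing logical fields to sqlite is a bad idea, so switching to integers") is easy to demonstrate. A minimal sketch, assuming only sf and dplyr; the column and file names are made up for the example:

library(sf)
library(dplyr)

edges <- st_sf(is_cycle = c(TRUE, FALSE),   # illustrative logical attribute
               geom = st_sfc(st_linestring(rbind(c(0, 0), c(1, 1))),
                             st_linestring(rbind(c(1, 1), c(2, 0)))))

# convert logical flags to 0/1 integers before writing
edges %>%
  mutate(across(where(is.logical), as.integer)) %>%
  st_write("edges_test.sqlite", layer = "edges", delete_layer = TRUE)

# the flags read back as integers, which survive the driver and test cleanly in R
edges.read <- st_read("edges_test.sqlite", layer = "edges")
stopifnot(all(edges.read$is_cycle %in% c(0L, 1L)))

This is why the unconfigured network layers written above carry integer flag columns rather than logicals.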
- - # Note: writing logical fields to sqlite is a bad idea, so switching to integers - networkInput <- list(st_read(networkSqlite,layer="nodes",quiet=T), - st_read(networkSqlite,layer="edges",quiet=T)) - - # We run into trouble if the geometry column is 'geom' instead of 'GEOMETRY' - if('GEOMETRY'%in%colnames(networkInput[[1]])) { - networkInput[[1]]<-networkInput[[1]]%>%rename(geom=GEOMETRY) - } - if('GEOMETRY'%in%colnames(networkInput[[2]])) { - networkInput[[2]]<-networkInput[[2]]%>%rename(geom=GEOMETRY) - } - - cat(paste0("Network input, nodes:\n")) - str(networkInput[[1]]) - # print.data.frame(head(networkInput[[1]])) - cat(paste0("\nNetwork input, edges:\n")) - str(networkInput[[2]]) - cat(paste0("\n")) - + # TO DO FROM HERE, and fix up 'networkInput' (now 'networkUnconfigured', and probably + # read in osm_metadata above, after 'networkUnconfigured <-') + # crop to test area if required if(crop2Area)system.time(networkInput <- crop2Poly(networkInput, cropAreaPoly, outputCrs)) echo("processing OSM meta data\n") - osm_metadata <- st_read(networkSqlite,layer="osm_metadata",quiet=T) %>% + osm_metadata <- st_read(unconfiguredSqlite,layer="osm_metadata",quiet=T) %>% filter(osm_id%in%networkInput[[2]]$osm_id) echo("Building default OSM attribute tables\n") defaults_df <- buildDefaultsDF() @@ -382,4 +381,4 @@ makeNetwork<-function(outputFileName="test"){ } ## JUST FOR TESTING -# output <- makeNetwork() \ No newline at end of file +output <- makeNetwork(city = "Bendigo") diff --git a/functions/processOsm.R b/functions/processOsm.R index ff41a1d..b1ea997 100644 --- a/functions/processOsm.R +++ b/functions/processOsm.R @@ -1,24 +1,10 @@ # function to convert OSM .gpkg file into network of nodes and edges - -library(sf) -library(fs) -library(dplyr) -library(lwgeom) -library(nngeo) -library(stringr) -library(doSNOW) -library(parallel) -library(foreach) -library(ggplot2) -source("./functions/etc/logging.R") -source("./functions/splitPathsAtPoints.R") -outputCrs = 7899 - processOsm <- function(osmGpkg, outputCrs) { - osmGpkg = "../data/processed/bendigo_osm.gpkg" + # osmGpkg = "../data/processed/bendigo_osm.gpkg" # osmGpkg = "../data/processed/melbourne_osm.gpkg" + # outputCrs = 7899 # read in OSM data # -----------------------------------# @@ -87,7 +73,6 @@ processOsm <- function(osmGpkg, outputCrs) { ungroup() ) - # temp dev notes (SP): # (1) compared to network.sql, this only places intersections where both are # bridges, both are tunnels, or both are neither (whereas network.sql, @@ -105,7 +90,7 @@ processOsm <- function(osmGpkg, outputCrs) { split.path.list <- splitPathsAtPoints(paths, intersections, 0.001, "osm_id") # convert to dataframe, snap to grid, remove empty geometries, add unique id - echo("Combining the split paths into a single dataframe") + echo("Combining the split paths into a single dataframe\n") system.time( split.paths <- bind_rows(split.path.list) %>% st_snap_to_grid(1) %>% @@ -116,19 +101,20 @@ processOsm <- function(osmGpkg, outputCrs) { ) # find endpoints of each split path - echo("Finding endpoints of the split paths") + echo("Finding endpoints of the split paths\n") endpoints <- rbind(lwgeom::st_startpoint(split.paths) %>% st_sf(), lwgeom::st_endpoint(split.paths) %>% st_sf()) %>% # remove duplicates (produces multipoint) summarise() %>% # convert multipoint to point st_cast("POINT") %>% - # add unique id - mutate(endpoint_id = row_number()) + # add unique id and set geometry column name + mutate(endpoint_id = row_number()) %>% + st_set_geometry("geom") # find 
split.paths that have more than 2 endpoints within 0.1m, in order
   # to re-split at adjacent endpoints
-  echo("Finding paths that need to be re-split")
+  echo("Finding paths that need to be re-split\n")
   paths.with.nearby.endpoints <- split.paths %>%
     # join endpoints within 0.1m
     st_join(endpoints %>% st_buffer(0.1), join = st_intersects)
@@ -150,7 +136,7 @@ processOsm <- function(osmGpkg, outputCrs) {
   # do a second round of splitting: re-split the paths that have adjacent endpoints,
   # using 0.1 distance this time, but only where adjacent endpoint is an endpoint
   # for a path that has the same bridge_tunnel status as the path to be resplit
-  echo(paste("Re-splitting", nrow(paths.to.resplit), "paths at adjacent endpoints"))
+  echo(paste("Re-splitting", nrow(paths.to.resplit), "paths at adjacent endpoints\n"))
 
   endpoints.for.resplit <- paths.with.nearby.endpoints %>%
     # just keep paths that need to be resplit, with their bridge_tunnel status
@@ -174,7 +160,7 @@ processOsm <- function(osmGpkg, outputCrs) {
     splitPathsAtPoints(paths.to.resplit, endpoints.for.resplit, 0.1, "path_id")
 
   # convert to dataframe, snap to grid, remove empty geometries
-  echo("Combining the resplit paths into a single dataframe")
+  echo("Combining the resplit paths into a single dataframe\n")
   system.time(
     resplit.paths <- bind_rows(resplit.path.list) %>%
       st_snap_to_grid(1) %>%
@@ -185,7 +171,6 @@ processOsm <- function(osmGpkg, outputCrs) {
   combined.paths <- split.paths %>%
     filter(!path_id %in% paths.to.resplit$path_id) %>%
     rbind(resplit.paths) %>%
-    dplyr::select(osm_id) %>%
     # add a new id field, for joining to from and to id's
     mutate(combined_path_id = row_number())
 
@@ -199,7 +184,7 @@ processOsm <- function(osmGpkg, outputCrs) {
   #     or 'level' tag status
 
 
-  # finalise paths
+  # finalise paths with metadata
   # -----------------------------------#
 
   # find from and to id's from endpoints
@@ -220,26 +205,41 @@ processOsm <- function(osmGpkg, outputCrs) {
     st_drop_geometry()
 
   # assemble final paths with length, from_id and to_id
-  final.paths <- combined.paths %>%
+  final.paths.with.metadata <- combined.paths %>%
     # add length column
     mutate(length = as.integer(st_length(geom))) %>%
-
     # join from_id and to_id
     left_join(from_ids, by = "combined_path_id") %>%
-    left_join(to_ids, by = "combined_path_id") %>%
-    
-    # select final fields
-    dplyr::select(osm_id, length, from_id, to_id)
+    left_join(to_ids, by = "combined_path_id")
 
-
-
-
-  # nodes.....
+ # nodes # -----------------------------------# + + echo("Finding roundabout and traffic signal status of nodes\n") + # extract from_id and to_id of edges that are roundabouts + roundabout.edges <- final.paths.with.metadata %>% + st_drop_geometry() %>% + # left_join(osm.metadata, by = "osm_id") %>% # <<< don't now need this + # is_roundabout: 1 if 'roundabout' in 'other_tags'; 0 if not, or if 'other_tags' is NA + mutate(is_roundabout = case_when( + is.na(other_tags) ~ 0, + str_detect(other_tags, "roundabout") ~ 1, + TRUE ~ 0)) %>% + dplyr::select(from_id, to_id, is_roundabout) + + # find node ids that connect to roundabouts + roundabout.nodes <- roundabout.edges %>% + filter(is_roundabout == 1) %>% + # combine from_id and to_id columns into new 'id' column + tidyr::gather(key = "key", value = "id", from_id, to_id) %>% + # keep distinct ids + dplyr::select(id) %>% + distinct() - # extract the traffic signals [MOVE THIS DOWN TO THE BIT DEALING WITH INTERSECTIONS] + # extract the traffic signals traffic.signals <- osm.points %>% # filter to traffic signals filter(str_detect(highway, "traffic_signals")) %>% @@ -247,61 +247,34 @@ processOsm <- function(osmGpkg, outputCrs) { st_snap_to_grid(1) %>% dplyr::select(osm_id, highway, other_tags) + # find endpoints within 20m of traffic signals + endpoints.near.signals <- endpoints %>% + st_filter(st_buffer(traffic.signals, 20), .predicate = st_intersects) %>% + .$endpoint_id + # finalise nodes: attribute with roundabout and signal status + final.nodes <- endpoints %>% + mutate(is_roundabout = ifelse(endpoint_id %in% roundabout.nodes$id, 1, 0), + is_signal = ifelse(endpoint_id %in% endpoints.near.signals, 1, 0)) - - - - #------ working section - - # # read in temporary tables - # sql.tables <- "./SP_working/temp_melb_sql_tables.sqlite" - # roads <- st_read(sql.tables, layer = "roads") - # - # write out temporary Bendigo outputs - bend.out <- "./SP_working/temp_bendigo.sqlite" - st_write(paths, bend.out, layer = "paths", delete_layer = T) - st_write(intersections, bend.out, layer = "intersections", delete_layer = T) - st_write(split.paths, bend.out, layer = "split_paths", delete_layer = T) - st_write(endpoints, bend.out, layer = "endpoints", delete_layer = T) - st_write(final.paths, bend.out, layer = "final_paths", delete_layer = T) - - st_write(split.paths, bend.out, layer = "split_paths_diff_loop", delete_layer = T) - st_write(paths.to.resplit, bend.out, layer = "paths_to_resplit", delete_layer = T) - st_delete(bend.out, layer = "split_paths_unbuffered_point_diff") - - paths <- st_read(bend.out, layer = "paths") - intersections <- st_read(bend.out, layer = "intersections") - split.paths <- st_read(bend.out, layer = "split_paths") - endpoints <- st_read(bend.out, layer = "endpoints") - final.paths <- st_read(bend.out, layer = "final_paths") - - # write out / read in temporary Melbourne outputs - melb.out <- "./SP_working/temp_melbourne.sqlite" - st_write(paths, melb.out, layer = "paths", delete_layer = T) - st_write(intersections, melb.out, layer = "intersections", delete_layer = T) - st_write(split.paths, melb.out, layer = "split_paths", delete_layer = T) - st_write(endpoints, melb.out, layer = "endpoints", delete_layer = T) - st_write(final.paths, melb.out, layer = "final_paths", delete_layer = T) - - st_write(paths.to.resplit, melb.out, layer = "paths_to_resplit", delete_layer = T) - st_write(problem.paths, melb.out, layer = "problem_paths", delete_layer = T) - - st_delete(melb.out, layer = "problem_paths") - - paths <- st_read(melb.out, layer = 
"paths") - intersections <- st_read(melb.out, layer = "intersections") - split.paths <- st_read(melb.out, layer = "split_paths") - endpoints <- st_read(melb.out, layer = "endpoints") - final.paths <- st_read(melb.out, layer = "final_paths") + # separate final paths and osm metadata + # -----------------------------------# - #--------end working section + # remove metadata from final paths + final.paths <- final.paths.with.metadata %>% + dplyr::select(osm_id, length, from_id, to_id) - # temporary return statement for testing - return(list(paths, intersections, endpoints, final.paths)) + # extract the non-spatial data for the paths + osm.metadata <- paths %>% + st_drop_geometry() %>% + dplyr::select(osm_id, highway, other_tags) %>% + filter(osm_id %in% final.paths$osm_id) + # return outputs + # -----------------------------------# + return(list(final.nodes, final.paths, osm.metadata)) } From 5f6cd457607c23d267d2ba611879e3de25276130 Mon Sep 17 00:00:00 2001 From: StevePem Date: Thu, 18 Jan 2024 17:10:31 +1100 Subject: [PATCH 046/103] correct id field --- functions/processOsm.R | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/functions/processOsm.R b/functions/processOsm.R index b1ea997..2aa30b5 100644 --- a/functions/processOsm.R +++ b/functions/processOsm.R @@ -255,7 +255,8 @@ processOsm <- function(osmGpkg, outputCrs) { # finalise nodes: attribute with roundabout and signal status final.nodes <- endpoints %>% mutate(is_roundabout = ifelse(endpoint_id %in% roundabout.nodes$id, 1, 0), - is_signal = ifelse(endpoint_id %in% endpoints.near.signals, 1, 0)) + is_signal = ifelse(endpoint_id %in% endpoints.near.signals, 1, 0)) %>% + rename(id = endpoint_id) # separate final paths and osm metadata From 8d5cbdc8abe78ad429a8bd0619618b023a609a25 Mon Sep 17 00:00:00 2001 From: StevePem Date: Fri, 19 Jan 2024 12:21:15 +1100 Subject: [PATCH 047/103] change default mode for 'track' from 'cycle' to both 'cycle' and 'walk' --- functions/buildDefaultsDF.R | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/functions/buildDefaultsDF.R b/functions/buildDefaultsDF.R index 63c32d5..30253a7 100644 --- a/functions/buildDefaultsDF.R +++ b/functions/buildDefaultsDF.R @@ -20,7 +20,7 @@ buildDefaultsDF <- function(){ "living_street" , 1 , (40/3.6) , 0 , 300 , 1 , 1 , 1 , 14 , "cycleway" , 1 , (30/3.6) , 0 , 300 , 1 , 0 , 0 , 15 , - "track" , 1 , (30/3.6) , 0 , 300 , 1 , 0 , 0 , 16 , + "track" , 1 , (30/3.6) , 0 , 300 , 1 , 1 , 0 , 16 , "service" , 1 , (40/3.6) , 0 , 300 , 1 , 1 , 1 , 17 , "pedestrian" , 1 , (30/3.6) , 0 , 120 , 0 , 1 , 0 , 18 , From 0ae2423aaf2340ca8f63052c178142438ea0a3c3 Mon Sep 17 00:00:00 2001 From: StevePem Date: Mon, 22 Jan 2024 17:23:29 +1100 Subject: [PATCH 048/103] conforming other scripts to bash/sql refactoring --- NetworkGenerator.R | 74 ++++++++++++------------- functions/addDestinations.R | 97 +++++++++++---------------------- functions/getDestinationTypes.R | 40 +------------- functions/getPTStops.R | 36 ++++++------ functions/writeOutputs.R | 30 +++++----- network.Rproj | 13 +++++ 6 files changed, 115 insertions(+), 175 deletions(-) create mode 100644 network.Rproj diff --git a/NetworkGenerator.R b/NetworkGenerator.R index fe7f9db..37d52ca 100644 --- a/NetworkGenerator.R +++ b/NetworkGenerator.R @@ -33,22 +33,22 @@ makeNetwork<-function(city, outputFileName = "test"){ outputCrs = 7899 osmGpkg = "../data/processed/bendigo_osm.gpkg" unconfiguredSqlite = "../data/processed/bendigo_network_unconfigured.sqlite" - # cropAreaPoly = "" # must set 'crop2Area=F' + 
cropAreaPoly = "" # must set 'crop2Area=F' # demFile = "./data/5m_DEM_reprojected.tif" # MIGHT NOT BE FINAL FILE # osmPbfExtract = "./data/brisbane_australia.osm.pbf" # ndviFile = "" # must set 'addNDVI=F' - # gtfs_feed = "./data/SEQ_GTFS.zip" + gtfs_feed = "../data/processed/gtfs.zip" } else if (city == "Melbourne") { region = "../data/processed/greater_melbourne.sqlite" outputCrs = 7899 osmGpkg = "../data/processed/melbourne_osm.gpkg" unconfiguredSqlite = "../data/processed/melbourne_network_unconfigured.sqlite" - # cropAreaPoly = "city-of-melbourne_victoria" + cropAreaPoly = "city-of-melbourne_victoria" # demFile = "./data/DEM_melbourne.tif" # osmPbfExtract = "./data/melbourne_australia.osm.pbf" # ndviFile = "./data/NDVI_1600mBuffer_Melbourne_reprojected.tif" - # gtfs_feed = "./data/gtfs.zip" + gtfs_feed = "../data/processed/gtfs.zip" } else { echo(paste("City parameters for", city, "have not been set; unable to proceed\n")) @@ -65,7 +65,7 @@ makeNetwork<-function(city, outputFileName = "test"){ # NETWORK FROM OSM # A flag for whether to build unconfigured network from osm extract (if not, # must already have unconfigured sqlite) - networkFromOsm=T + networkFromOsm=F saveUnconfigured=T # SIMPLIFICATION @@ -74,7 +74,7 @@ makeNetwork<-function(city, outputFileName = "test"){ crop2Area=F # DENSIFICATION - desnificationMaxLengh=500 + densificationMaxLength=500 densifyBikeways=T # CAPACITY ADJUSTMENT @@ -84,16 +84,16 @@ makeNetwork<-function(city, outputFileName = "test"){ # ELEVATION # A flag for whether to add elevation or not - addElevation=T + addElevation=F ElevationMultiplier=1 # DESTINATIONS - # A flag for whether to add a destinations layer (drawn from OSM) or not + # A flag for whether to add a destinations layer (drawn from OSM, and GTFS for PT) or not addDestinationLayer=T # NDVI # A flag for whether to add NDVI or not - addNDVI=T + addNDVI=F # Buffer distance for finding average NDVI for links ndviBuffDist=30 @@ -175,14 +175,19 @@ makeNetwork<-function(city, outputFileName = "test"){ # Processing OSM, or loading existing layers if not required if(networkFromOsm) { echo(paste0("Starting to process osm extract file, ", osmGpkg, "\n")) - networkUnconfigured <- processOsm(osmGpkg, outputCrs) + networkUnconfiguredOutputs <- processOsm(osmGpkg, outputCrs) if (saveUnconfigured) { - st_write(networkUnconfigured[[1]], unconfiguredSqlite, layer = "nodes", delete_layer = T) - st_write(networkUnconfigured[[2]], unconfiguredSqlite, layer = "edges", delete_layer = T) - st_write(networkUnconfigured[[3]], unconfiguredSqlite, layer = "osm_metadata", delete_layer = T) + if (file_exists(unconfiguredSqlite)) st_delete(unconfiguredSqlite) + st_write(networkUnconfiguredOutputs[[1]], unconfiguredSqlite, layer = "nodes") + st_write(networkUnconfiguredOutputs[[2]], unconfiguredSqlite, layer = "edges") + st_write(networkUnconfiguredOutputs[[3]], unconfiguredSqlite, layer = "osm_metadata") } + networkUnconfigured <- list(networkUnconfiguredOutputs[[1]], + networkUnconfiguredOutputs[[2]]) + osm_metadata <- networkUnconfiguredOutputs[[3]] + } else { if (file_exists(unconfiguredSqlite)) { @@ -190,6 +195,8 @@ makeNetwork<-function(city, outputFileName = "test"){ networkUnconfigured <- list(st_read(unconfiguredSqlite, layer = "nodes") %>% st_set_geometry("geom"), st_read(unconfiguredSqlite, layer = "edges") %>% st_set_geometry("geom")) + osm_metadata <- st_read(unconfiguredSqlite, layer = "osm_metadata") %>% + filter(osm_id %in% networkUnconfigured[[2]]$osm_id) } else { echo(paste("Unconfigured network file", 
unconfiguredSqlite, "not found; unable to proceed\n")) @@ -197,38 +204,24 @@ makeNetwork<-function(city, outputFileName = "test"){ } } - # TO DO FROM HERE, and fix up 'networkInput' (now 'networkUnconfigured', and probably - # read in osm_metadata above, after 'networkUnconfigured <-') # crop to test area if required - if(crop2Area)system.time(networkInput <- crop2Poly(networkInput, - cropAreaPoly, - outputCrs)) + if(crop2Area)system.time(networkUnconfigured <- crop2Poly(networkUnconfigured, + cropAreaPoly, + outputCrs)) + # process OSM metadata echo("processing OSM meta data\n") - osm_metadata <- st_read(unconfiguredSqlite,layer="osm_metadata",quiet=T) %>% - filter(osm_id%in%networkInput[[2]]$osm_id) echo("Building default OSM attribute tables\n") defaults_df <- buildDefaultsDF() highway_lookup <- defaults_df %>% dplyr::select(highway, highway_order) echo("Processing OSM tags and joining with defaults\n") system.time( osmAttributes <- processOsmTags(osm_metadata,defaults_df)) - edgesAttributed <- networkInput[[2]] %>% + edgesAttributed <- networkUnconfigured[[2]] %>% inner_join(osmAttributes, by="osm_id") %>% - # dplyr::select(-osm_id,highway,highway_order) - dplyr::select(-highway,highway_order) - - cat(paste0("edgesAttributed:\n")) - str(edgesAttributed) - cat(paste0("\n")) + dplyr::select(-highway, highway_order) # keep only the largest connected component - largestComponent <- largestConnectedComponent(networkInput[[1]],edgesAttributed) - - cat(paste0("largestComponent, nodes:\n")) - str(largestComponent[[1]]) - cat(paste0("\nlargestComponent, edges:\n")) - str(largestComponent[[2]]) - cat(paste0("\n")) + largestComponent <- largestConnectedComponent(networkUnconfigured[[1]], edgesAttributed) # simplify intersections while preserving attributes and original geometry. 
system.time(intersectionsSimplified <- simplifyIntersections(largestComponent[[1]], @@ -287,8 +280,8 @@ makeNetwork<-function(city, outputFileName = "test"){ networkConnected <- largestNetworkSubgraph(networkNonDisconnected,'walk') # densify the network so that no residential streets are longer than 500m - if (addElevation==T & densifyBikeways==F) message("Consider changing densifyBikeways to true when addElevation is true to ge a more accurate slope esimation for bikeways") - networkDensified <- densifyNetwork(networkConnected,desnificationMaxLengh, + if (addElevation==T & densifyBikeways==F) message("Consider changing densifyBikeways to true when addElevation is true to get a more accurate slope esimation for bikeways") + networkDensified <- densifyNetwork(networkConnected,densificationMaxLength, densifyBikeways) # Adding NDVI to links @@ -302,10 +295,12 @@ makeNetwork<-function(city, outputFileName = "test"){ if (addDestinationLayer) { destinations <- addDestinations(networkDensified[[1]], networkDensified[[2]], - osmPbfExtract, + osmGpkg, city, gtfs_feed, - outputCrs) + outputCrs, + region, + regionBufferDist) } # simplify geometry so all edges are straight lines @@ -381,4 +376,5 @@ makeNetwork<-function(city, outputFileName = "test"){ } ## JUST FOR TESTING -output <- makeNetwork(city = "Bendigo") +makeNetwork(city = "Bendigo") +makeNetwork(city = "Melbourne") diff --git a/functions/addDestinations.R b/functions/addDestinations.R index 2b376a2..af6d847 100644 --- a/functions/addDestinations.R +++ b/functions/addDestinations.R @@ -1,76 +1,53 @@ # function to create a destination layer to add to output network -# assumes input file (OSMextract) is in .osm.pbf format, for example, -# as downloaded from https://www.interline.io/osm/extracts/ - # uses functions for various destination types with tag combinations set out # in 'getDestinationTypes.R' -# NOTE - WILL REQUIRE REFACTORING IF THE .GPKG, RATHER THAN THE .OSM.PBF, IS -# SAVED AS THE BASE FILE - NEED TO EXTRACT THE OTHER TAGS FROM THE .GPKG - addDestinations <- function(nodes_current, edges_current, - osmPbfExtract, + osmGpkg, city, gtfs_feed, - outputCrs) { - + outputCrs, + region, + regionBufferDist) { + # nodes_current = networkDensified[[1]] # edges_current = networkDensified[[2]] - # osmPbfExtract = "./data/melbourne_australia.osm.pbf" + # osmGpkg = "../data/processed/melbourne_osm.gpkg" # city = "Melbourne" - # gtfs_feed = "./data/gtfs.zip" + # gtfs_feed = "../data/processed/gtfs.zip" # outputCrs = 28355 - # # check layers - # st_layers(osmPbfExtract) - # # only multipolygons, points and lines are required (not multilinestrings - # # or other_relations) [and lines not required when using GTFS for PT stops] - # # check keys # options(max.print = 2000) - # polygon.tags <- oe_get_keys(osmPbfExtract, layer = "multipolygons") %>% sort() - # point.tags <- oe_get_keys(osmPbfExtract, layer = "points") %>% sort() - # line.tags <- oe_get_keys(osmPbfExtract, layer = "lines") %>% sort() - + # point.tags <- oe_get_keys(osmGpkg, layer = "points") %>% sort() + # polygon.tags <- oe_get_keys(osmGpkg, layer = "multipolygons") %>% sort() + # reading layers ---- # ----------------------------------# - echo("Reading in the .osm.pbf extract layers\n") + echo("Reading in the OSM layers\n") - # create gpkg file in same directory as osmPbfExtract, using the 'extra_tags' - # Note: - # - the gpkg does not need to be retained permanently, but its creation is part - # of the process of reading the layers; if already created, the reading - # process will be 
quicker) - # - for simplicity, the same extra tags are added for all layers, though - # some don't exist for some layer types - extra.tags <- c("access", "amenity", "building", "grades", "healthcare", - "healthcare:speciality", "isced:level", "landuse", "leisure", - "network", "operator", "operator:type", "parking", - "public_transport", "railway", "school", "shop", - "social_facility", "sport", "tourism", "train") - # oe_vectortranslate(osmPbfExtract, layer = "multipolygons", extra_tags = extra.tags) - # oe_vectortranslate(osmPbfExtract, layer = "points", extra_tags = extra.tags) - # oe_vectortranslate(osmPbfExtract, layer = "lines", extra_tags = extra.tags) - # - # # read in the .gpkg file (same directory and name as .osm.pbf file, but .gpkg extension) - # gpkg <- paste0(path_dir(osmPbfExtract), "/", - # gsub(".osm.pbf", ".gpkg", path_file(osmPbfExtract))) - # read in the layers - polygons <- oe_read(osmPbfExtract, layer = "multipolygons", extra_tags = extra.tags) %>% - st_transform(outputCrs) - points <- oe_read(osmPbfExtract, layer = "points", extra_tags = extra.tags) %>% - st_transform(outputCrs) - # lines <- oe_read(osmPbfExtract, layer = "lines", extra_tags = extra.tags) %>% - # st_transform(outputCrs) + extra.tag.string <- "SELECT *, + hstore_get_value(other_tags, 'access') AS access, + hstore_get_value(other_tags, 'amenity') AS amenity, + hstore_get_value(other_tags, 'grades') AS grades, + hstore_get_value(other_tags, 'healthcare') AS healthcare, + hstore_get_value(other_tags, 'isced:level') AS isced_level, + hstore_get_value(other_tags, 'leisure') AS leisure, + hstore_get_value(other_tags, 'parking') AS parking, + hstore_get_value(other_tags, 'school') AS school, + hstore_get_value(other_tags, 'shop') AS shop, + hstore_get_value(other_tags, 'sport') AS sport" + # read in the layers + points <- oe_read(osmGpkg, query = paste(extra.tag.string, "FROM points"), quiet = TRUE) + polygons <- oe_read(osmGpkg, query = paste(extra.tag.string, "FROM multipolygons"), quiet = TRUE) # function to extract specific destination types from point or polygon layers ---- # ----------------------------------# # all the tag combination functions in 'getDestinationTypes.R' apply to both - # points and polygons, except 'railway station', which are a combination of - # point, polygon and line features + # points and polygons destination.layer <- function(layer) { return( @@ -106,44 +83,36 @@ addDestinations <- function(nodes_current, # and store area and location details destination.pt <- bind_rows(destination.layer(points), - - # # add stations (from point, polygons and lines) to point table - # getStation(points, polygons, lines) %>% - # mutate(dest_type = "railway_station")) %>% # add PT stops (from GTFS feed) to point table - getPTStops(city, gtfs_feed, outputCrs, edges_current) %>% + getPTStops(city, gtfs_feed, outputCrs, region, regionBufferDist) %>% mutate(dest_type = "pt_stop")) %>% mutate(dest_id = row_number(), area_m2 = 0, centroid_x = st_coordinates(.)[, 1], centroid_y = st_coordinates(.)[, 2]) - + destination.poly <- destination.layer(polygons) %>% + filter(st_is_valid(geom)) %>% mutate(dest_id = max(destination.pt$dest_id) + row_number(), area_m2 = as.numeric(st_area(.)), centroid_x = st_coordinates(st_centroid(.))[, 1], centroid_y = st_coordinates(st_centroid(.))[, 2]) - # Remove any invalid polygons as they may cause errors - destination.poly <- destination.poly[which(st_is_valid(destination.poly$geometry)), ] - - # Remove any invalid polygons as they may cause errors - destination.poly <- 
destination.poly[which(st_is_valid(destination.poly$geometry)), ] - + # # check numbers of each destination type # chk <- full_join(destination.poly %>% # st_drop_geometry() %>% # group_by(dest_type) %>% - # summarise(poly = n()), + # summarise(poly = n()), # destination.pt %>% # st_drop_geometry() %>% # group_by(dest_type) %>% - # summarise(pt = n()), + # summarise(pt = n()), # by = "dest_type") - + # find relevant nodes ---- # For all destinations except parks and schools ('small features'), relevant diff --git a/functions/getDestinationTypes.R b/functions/getDestinationTypes.R index 9570e17..e25ee86 100644 --- a/functions/getDestinationTypes.R +++ b/functions/getDestinationTypes.R @@ -1,8 +1,6 @@ # functions to locate specific types of destinations -## All tag combinations below can be applied to both points and polygons, except -## railway stations which are a combination of points, polygons and lines, and -## require aggregation within a boundary distance +## All tag combinations below can be applied to both points and polygons # 1 open space ---- getPlayground <- function(layer) { @@ -130,39 +128,3 @@ getParking <- function(layer) { return(layer %>% filter(amenity == "parking" & !access %in% c("no", "private"))) } - - -# 9 railway stations [not used] ---- -# See getPtStops.R instead - -# # Returns list of stations as points -# # Note the buffer distance of 100m below; closest railway stations in Melbourne are -# # Riversdale & Willison (about 420m) -# getStation <- function(points, polygons, lines) { -# # general filter to find station objects -# filterStation <- function(layer) { -# return(layer %>% -# filter((public_transport == "station" | public_transport == "stop_position") & -# (railway == "station" | railway == "stop" | train == "yes" | -# grepl("train", tolower(network)) | grepl("train", tolower(operator))) & -# (is.na(tourism) | tourism != "yes") & -# (is.na(railway) | railway != "construction"))) -# } -# -# # find each object, and buffer to 100m -# buff.dist <- 100 -# station.pt <- filterStation(points) %>% st_buffer(buff.dist) -# station.poly <- filterStation(polygons) %>% st_buffer(buff.dist) -# station.line <- filterStation(lines) %>% st_buffer(buff.dist) -# -# # dissolve, then separate to individual polygons -# stations <- bind_rows(station.pt, station.poly, station.line) %>% -# st_union() %>% -# st_as_sf() %>% -# st_cast("POLYGON") %>% -# st_centroid() %>% -# # label geometry column -# rename(geometry = x) -# -# } - diff --git a/functions/getPTStops.R b/functions/getPTStops.R index e6d4a09..a9f6b0a 100644 --- a/functions/getPTStops.R +++ b/functions/getPTStops.R @@ -2,24 +2,23 @@ # requires tidytransit (loaded in NetworkGenerator.R) -getPTStops <- function(city, gtfs_feed, outputCrs, edges_current) { +getPTStops <- function(city, gtfs_feed, outputCrs, region, regionBufferDist) { # city = "Melbourne" - # gtfs_feed = "./data/gtfs.zip" - # outputCrs = 28355 - # edges_current = networkDensified[[2]] + # gtfs_feed = "../data/processed/gtfs.zip" + # outputCrs = 7899 + # region = "../data/processed/greater_melbourne.sqlite" + # regionBufferDist = 10000 # read in GTFS feed gtfs <- read_gtfs(gtfs_feed) %>% gtfs_as_sf(., crs = 4326) - # extract stops with their locations + # extract stops with their locations, filtered to study area + study.area <- st_buffer(st_read(region), regionBufferDist) stops <- gtfs$stops %>% - st_transform(outputCrs) - - # limit to stops within the study area (convex hull of edges) - stops <- stops %>% - st_filter(., 
st_convex_hull(st_union(edges_current)), - predicate = st_intersects) + st_transform(outputCrs) %>% + st_set_geometry("geom") %>% + st_filter(study.area, predicate = st_intersects) # table of stops and route types stops.routetypes <- gtfs$stop_times %>% @@ -33,9 +32,12 @@ getPTStops <- function(city, gtfs_feed, outputCrs, edges_current) { # apply route types route_types = stops.routetypes$route_type %>% unique() %>% sort() - if (city == "ProvisionForMunich") { # test should be city is Munich AND stops are in the expected list - - + if (city == "exception_city") { + # if a city is known to have exceptional route types, build a condition that + # applies where the city is specified and the route types are in the expected + # list - eg city == "Gotham City" & all(route_types %in% c("13", "14", "15")) - + # and provide an appropriate message and function (similar to 'else') + } else if (!all(route_types %in% c("0", "1", "2", "3", "4", "5", "6", "7", "11", "12"))) { message("GTFS Feed contains the following route type codes: ", paste(route_types, collapse = ", "), ". Unable to process these using the standard route codes from https://developers.google.com/transit/gtfs/reference, which are: @@ -48,7 +50,7 @@ PT stops will not be included in destinations.") message("GTFS Feed contains the following route type codes: ", paste(route_types, collapse = ", "), ". Using standard route_type codes from https://developers.google.com/transit/gtfs/reference: 0-tram, 1-metro, 2-train, 3-bus, 4-ferry, 5-cable tram, 6-cable car, 7-funicular, 11-trolleybus, 12 monorail. -Check that these match the codes used in your GTFS feed.") +Adjust 'getPTStops' function if these don't match the codes used in your GTFS feed.") stops.routetypes.coded <- stops.routetypes %>% mutate(pt_stop_type = case_when( route_type == 0 ~ "tram", @@ -78,6 +80,4 @@ Check that these match the codes used in your GTFS feed.") return(c()) # empty vector if no stops can be returned } - - -} \ No newline at end of file +} diff --git a/functions/writeOutputs.R b/functions/writeOutputs.R index e82d7e5..e6ecf40 100644 --- a/functions/writeOutputs.R +++ b/functions/writeOutputs.R @@ -181,21 +181,21 @@ exportXML <- function(networkFinal, outputDir){ if(!("id" %in% colnames(links))) links <- links %>% mutate(id=NA) if(!("fwd_slope_pct" %in% colnames(links))) links <- links %>% mutate(fwd_slope_pct=NA, rvs_slope_pct=NA) - # Adding a reverse links for bi-directionals - bi_links <- links %>% - filter(is_oneway==0) %>% - rename(from_id=to_id, to_id=from_id, toX=fromX, toY=fromY, fromX=toX, - fromY=toY, slope=rvs_slope_pct) %>% - mutate(id=NA) %>% - dplyr::select(from_id, to_id, fromX, fromY, toX, toY, length, freespeed, - permlanes, capacity, is_oneway, cycleway, highway, surface, - slope, is_cycle, is_walk, is_car, modes, id) - - links <- rbind( - {links %>% dplyr::select(from_id, to_id, fromX, fromY, toX, toY, length, freespeed, - permlanes, capacity, is_oneway, cycleway, highway, surface, - slope=fwd_slope_pct, is_cycle, is_walk, is_car, modes, id)}, - bi_links) + # # Adding a reverse links for bi-directionals - not required as makeEdgesOneway has made them all oneway + # bi_links <- links %>% + # filter(is_oneway==0) %>% + # rename(from_id=to_id, to_id=from_id, toX=fromX, toY=fromY, fromX=toX, + # fromY=toY, slope=rvs_slope_pct) %>% + # mutate(id=NA) %>% + # dplyr::select(from_id, to_id, fromX, fromY, toX, toY, length, freespeed, + # permlanes, capacity, is_oneway, cycleway, highway, surface, + # slope, is_cycle, is_walk, is_car, modes, id) + # + # 
links <- rbind( + # {links %>% dplyr::select(from_id, to_id, fromX, fromY, toX, toY, length, freespeed, + # permlanes, capacity, is_oneway, cycleway, highway, surface, + # slope=fwd_slope_pct, is_cycle, is_walk, is_car, modes, id)}, + # bi_links) # Adding bicycle and extra information links <- fncols(links, c("id","osm_id", "highway", "cycleway","slope", diff --git a/network.Rproj b/network.Rproj new file mode 100644 index 0000000..8e3c2eb --- /dev/null +++ b/network.Rproj @@ -0,0 +1,13 @@ +Version: 1.0 + +RestoreWorkspace: Default +SaveWorkspace: Default +AlwaysSaveHistory: Default + +EnableCodeIndexing: Yes +UseSpacesForTab: Yes +NumSpacesForTab: 2 +Encoding: UTF-8 + +RnwWeave: Sweave +LaTeX: pdfLaTeX From 553b739afa3c5e0ab2a6a60015e8d1166d1ac660 Mon Sep 17 00:00:00 2001 From: Afshin Jafari <3414292+jafshin@users.noreply.github.com> Date: Thu, 25 Jan 2024 18:15:29 +1100 Subject: [PATCH 049/103] Update LICENSE (#27) --- LICENSE | 695 ++------------------------------------------------------ 1 file changed, 21 insertions(+), 674 deletions(-) diff --git a/LICENSE b/LICENSE index f288702..cfa19f2 100644 --- a/LICENSE +++ b/LICENSE @@ -1,674 +1,21 @@ - GNU GENERAL PUBLIC LICENSE - Version 3, 29 June 2007 - - Copyright (C) 2007 Free Software Foundation, Inc. - Everyone is permitted to copy and distribute verbatim copies - of this license document, but changing it is not allowed. - - Preamble - - The GNU General Public License is a free, copyleft license for -software and other kinds of works. - - The licenses for most software and other practical works are designed -to take away your freedom to share and change the works. By contrast, -the GNU General Public License is intended to guarantee your freedom to -share and change all versions of a program--to make sure it remains free -software for all its users. We, the Free Software Foundation, use the -GNU General Public License for most of our software; it applies also to -any other work released this way by its authors. You can apply it to -your programs, too. - - When we speak of free software, we are referring to freedom, not -price. Our General Public Licenses are designed to make sure that you -have the freedom to distribute copies of free software (and charge for -them if you wish), that you receive source code or can get it if you -want it, that you can change the software or use pieces of it in new -free programs, and that you know you can do these things. - - To protect your rights, we need to prevent others from denying you -these rights or asking you to surrender the rights. Therefore, you have -certain responsibilities if you distribute copies of the software, or if -you modify it: responsibilities to respect the freedom of others. - - For example, if you distribute copies of such a program, whether -gratis or for a fee, you must pass on to the recipients the same -freedoms that you received. You must make sure that they, too, receive -or can get the source code. And you must show them these terms so they -know their rights. - - Developers that use the GNU GPL protect your rights with two steps: -(1) assert copyright on the software, and (2) offer you this License -giving you legal permission to copy, distribute and/or modify it. - - For the developers' and authors' protection, the GPL clearly explains -that there is no warranty for this free software. 
For both users' and -authors' sake, the GPL requires that modified versions be marked as -changed, so that their problems will not be attributed erroneously to -authors of previous versions. - - Some devices are designed to deny users access to install or run -modified versions of the software inside them, although the manufacturer -can do so. This is fundamentally incompatible with the aim of -protecting users' freedom to change the software. The systematic -pattern of such abuse occurs in the area of products for individuals to -use, which is precisely where it is most unacceptable. Therefore, we -have designed this version of the GPL to prohibit the practice for those -products. If such problems arise substantially in other domains, we -stand ready to extend this provision to those domains in future versions -of the GPL, as needed to protect the freedom of users. - - Finally, every program is threatened constantly by software patents. -States should not allow patents to restrict development and use of -software on general-purpose computers, but in those that do, we wish to -avoid the special danger that patents applied to a free program could -make it effectively proprietary. To prevent this, the GPL assures that -patents cannot be used to render the program non-free. - - The precise terms and conditions for copying, distribution and -modification follow. - - TERMS AND CONDITIONS - - 0. Definitions. - - "This License" refers to version 3 of the GNU General Public License. - - "Copyright" also means copyright-like laws that apply to other kinds of -works, such as semiconductor masks. - - "The Program" refers to any copyrightable work licensed under this -License. Each licensee is addressed as "you". "Licensees" and -"recipients" may be individuals or organizations. - - To "modify" a work means to copy from or adapt all or part of the work -in a fashion requiring copyright permission, other than the making of an -exact copy. The resulting work is called a "modified version" of the -earlier work or a work "based on" the earlier work. - - A "covered work" means either the unmodified Program or a work based -on the Program. - - To "propagate" a work means to do anything with it that, without -permission, would make you directly or secondarily liable for -infringement under applicable copyright law, except executing it on a -computer or modifying a private copy. Propagation includes copying, -distribution (with or without modification), making available to the -public, and in some countries other activities as well. - - To "convey" a work means any kind of propagation that enables other -parties to make or receive copies. Mere interaction with a user through -a computer network, with no transfer of a copy, is not conveying. - - An interactive user interface displays "Appropriate Legal Notices" -to the extent that it includes a convenient and prominently visible -feature that (1) displays an appropriate copyright notice, and (2) -tells the user that there is no warranty for the work (except to the -extent that warranties are provided), that licensees may convey the -work under this License, and how to view a copy of this License. If -the interface presents a list of user commands or options, such as a -menu, a prominent item in the list meets this criterion. - - 1. Source Code. - - The "source code" for a work means the preferred form of the work -for making modifications to it. "Object code" means any non-source -form of a work. 
- - A "Standard Interface" means an interface that either is an official -standard defined by a recognized standards body, or, in the case of -interfaces specified for a particular programming language, one that -is widely used among developers working in that language. - - The "System Libraries" of an executable work include anything, other -than the work as a whole, that (a) is included in the normal form of -packaging a Major Component, but which is not part of that Major -Component, and (b) serves only to enable use of the work with that -Major Component, or to implement a Standard Interface for which an -implementation is available to the public in source code form. A -"Major Component", in this context, means a major essential component -(kernel, window system, and so on) of the specific operating system -(if any) on which the executable work runs, or a compiler used to -produce the work, or an object code interpreter used to run it. - - The "Corresponding Source" for a work in object code form means all -the source code needed to generate, install, and (for an executable -work) run the object code and to modify the work, including scripts to -control those activities. However, it does not include the work's -System Libraries, or general-purpose tools or generally available free -programs which are used unmodified in performing those activities but -which are not part of the work. For example, Corresponding Source -includes interface definition files associated with source files for -the work, and the source code for shared libraries and dynamically -linked subprograms that the work is specifically designed to require, -such as by intimate data communication or control flow between those -subprograms and other parts of the work. - - The Corresponding Source need not include anything that users -can regenerate automatically from other parts of the Corresponding -Source. - - The Corresponding Source for a work in source code form is that -same work. - - 2. Basic Permissions. - - All rights granted under this License are granted for the term of -copyright on the Program, and are irrevocable provided the stated -conditions are met. This License explicitly affirms your unlimited -permission to run the unmodified Program. The output from running a -covered work is covered by this License only if the output, given its -content, constitutes a covered work. This License acknowledges your -rights of fair use or other equivalent, as provided by copyright law. - - You may make, run and propagate covered works that you do not -convey, without conditions so long as your license otherwise remains -in force. You may convey covered works to others for the sole purpose -of having them make modifications exclusively for you, or provide you -with facilities for running those works, provided that you comply with -the terms of this License in conveying all material for which you do -not control copyright. Those thus making or running the covered works -for you must do so exclusively on your behalf, under your direction -and control, on terms that prohibit them from making any copies of -your copyrighted material outside their relationship with you. - - Conveying under any other circumstances is permitted solely under -the conditions stated below. Sublicensing is not allowed; section 10 -makes it unnecessary. - - 3. Protecting Users' Legal Rights From Anti-Circumvention Law. 
- - No covered work shall be deemed part of an effective technological -measure under any applicable law fulfilling obligations under article -11 of the WIPO copyright treaty adopted on 20 December 1996, or -similar laws prohibiting or restricting circumvention of such -measures. - - When you convey a covered work, you waive any legal power to forbid -circumvention of technological measures to the extent such circumvention -is effected by exercising rights under this License with respect to -the covered work, and you disclaim any intention to limit operation or -modification of the work as a means of enforcing, against the work's -users, your or third parties' legal rights to forbid circumvention of -technological measures. - - 4. Conveying Verbatim Copies. - - You may convey verbatim copies of the Program's source code as you -receive it, in any medium, provided that you conspicuously and -appropriately publish on each copy an appropriate copyright notice; -keep intact all notices stating that this License and any -non-permissive terms added in accord with section 7 apply to the code; -keep intact all notices of the absence of any warranty; and give all -recipients a copy of this License along with the Program. - - You may charge any price or no price for each copy that you convey, -and you may offer support or warranty protection for a fee. - - 5. Conveying Modified Source Versions. - - You may convey a work based on the Program, or the modifications to -produce it from the Program, in the form of source code under the -terms of section 4, provided that you also meet all of these conditions: - - a) The work must carry prominent notices stating that you modified - it, and giving a relevant date. - - b) The work must carry prominent notices stating that it is - released under this License and any conditions added under section - 7. This requirement modifies the requirement in section 4 to - "keep intact all notices". - - c) You must license the entire work, as a whole, under this - License to anyone who comes into possession of a copy. This - License will therefore apply, along with any applicable section 7 - additional terms, to the whole of the work, and all its parts, - regardless of how they are packaged. This License gives no - permission to license the work in any other way, but it does not - invalidate such permission if you have separately received it. - - d) If the work has interactive user interfaces, each must display - Appropriate Legal Notices; however, if the Program has interactive - interfaces that do not display Appropriate Legal Notices, your - work need not make them do so. - - A compilation of a covered work with other separate and independent -works, which are not by their nature extensions of the covered work, -and which are not combined with it such as to form a larger program, -in or on a volume of a storage or distribution medium, is called an -"aggregate" if the compilation and its resulting copyright are not -used to limit the access or legal rights of the compilation's users -beyond what the individual works permit. Inclusion of a covered work -in an aggregate does not cause this License to apply to the other -parts of the aggregate. - - 6. Conveying Non-Source Forms. 
- - You may convey a covered work in object code form under the terms -of sections 4 and 5, provided that you also convey the -machine-readable Corresponding Source under the terms of this License, -in one of these ways: - - a) Convey the object code in, or embodied in, a physical product - (including a physical distribution medium), accompanied by the - Corresponding Source fixed on a durable physical medium - customarily used for software interchange. - - b) Convey the object code in, or embodied in, a physical product - (including a physical distribution medium), accompanied by a - written offer, valid for at least three years and valid for as - long as you offer spare parts or customer support for that product - model, to give anyone who possesses the object code either (1) a - copy of the Corresponding Source for all the software in the - product that is covered by this License, on a durable physical - medium customarily used for software interchange, for a price no - more than your reasonable cost of physically performing this - conveying of source, or (2) access to copy the - Corresponding Source from a network server at no charge. - - c) Convey individual copies of the object code with a copy of the - written offer to provide the Corresponding Source. This - alternative is allowed only occasionally and noncommercially, and - only if you received the object code with such an offer, in accord - with subsection 6b. - - d) Convey the object code by offering access from a designated - place (gratis or for a charge), and offer equivalent access to the - Corresponding Source in the same way through the same place at no - further charge. You need not require recipients to copy the - Corresponding Source along with the object code. If the place to - copy the object code is a network server, the Corresponding Source - may be on a different server (operated by you or a third party) - that supports equivalent copying facilities, provided you maintain - clear directions next to the object code saying where to find the - Corresponding Source. Regardless of what server hosts the - Corresponding Source, you remain obligated to ensure that it is - available for as long as needed to satisfy these requirements. - - e) Convey the object code using peer-to-peer transmission, provided - you inform other peers where the object code and Corresponding - Source of the work are being offered to the general public at no - charge under subsection 6d. - - A separable portion of the object code, whose source code is excluded -from the Corresponding Source as a System Library, need not be -included in conveying the object code work. - - A "User Product" is either (1) a "consumer product", which means any -tangible personal property which is normally used for personal, family, -or household purposes, or (2) anything designed or sold for incorporation -into a dwelling. In determining whether a product is a consumer product, -doubtful cases shall be resolved in favor of coverage. For a particular -product received by a particular user, "normally used" refers to a -typical or common use of that class of product, regardless of the status -of the particular user or of the way in which the particular user -actually uses, or expects or is expected to use, the product. A product -is a consumer product regardless of whether the product has substantial -commercial, industrial or non-consumer uses, unless such uses represent -the only significant mode of use of the product. 
- - "Installation Information" for a User Product means any methods, -procedures, authorization keys, or other information required to install -and execute modified versions of a covered work in that User Product from -a modified version of its Corresponding Source. The information must -suffice to ensure that the continued functioning of the modified object -code is in no case prevented or interfered with solely because -modification has been made. - - If you convey an object code work under this section in, or with, or -specifically for use in, a User Product, and the conveying occurs as -part of a transaction in which the right of possession and use of the -User Product is transferred to the recipient in perpetuity or for a -fixed term (regardless of how the transaction is characterized), the -Corresponding Source conveyed under this section must be accompanied -by the Installation Information. But this requirement does not apply -if neither you nor any third party retains the ability to install -modified object code on the User Product (for example, the work has -been installed in ROM). - - The requirement to provide Installation Information does not include a -requirement to continue to provide support service, warranty, or updates -for a work that has been modified or installed by the recipient, or for -the User Product in which it has been modified or installed. Access to a -network may be denied when the modification itself materially and -adversely affects the operation of the network or violates the rules and -protocols for communication across the network. - - Corresponding Source conveyed, and Installation Information provided, -in accord with this section must be in a format that is publicly -documented (and with an implementation available to the public in -source code form), and must require no special password or key for -unpacking, reading or copying. - - 7. Additional Terms. - - "Additional permissions" are terms that supplement the terms of this -License by making exceptions from one or more of its conditions. -Additional permissions that are applicable to the entire Program shall -be treated as though they were included in this License, to the extent -that they are valid under applicable law. If additional permissions -apply only to part of the Program, that part may be used separately -under those permissions, but the entire Program remains governed by -this License without regard to the additional permissions. - - When you convey a copy of a covered work, you may at your option -remove any additional permissions from that copy, or from any part of -it. (Additional permissions may be written to require their own -removal in certain cases when you modify the work.) You may place -additional permissions on material, added by you to a covered work, -for which you have or can give appropriate copyright permission. 
- - Notwithstanding any other provision of this License, for material you -add to a covered work, you may (if authorized by the copyright holders of -that material) supplement the terms of this License with terms: - - a) Disclaiming warranty or limiting liability differently from the - terms of sections 15 and 16 of this License; or - - b) Requiring preservation of specified reasonable legal notices or - author attributions in that material or in the Appropriate Legal - Notices displayed by works containing it; or - - c) Prohibiting misrepresentation of the origin of that material, or - requiring that modified versions of such material be marked in - reasonable ways as different from the original version; or - - d) Limiting the use for publicity purposes of names of licensors or - authors of the material; or - - e) Declining to grant rights under trademark law for use of some - trade names, trademarks, or service marks; or - - f) Requiring indemnification of licensors and authors of that - material by anyone who conveys the material (or modified versions of - it) with contractual assumptions of liability to the recipient, for - any liability that these contractual assumptions directly impose on - those licensors and authors. - - All other non-permissive additional terms are considered "further -restrictions" within the meaning of section 10. If the Program as you -received it, or any part of it, contains a notice stating that it is -governed by this License along with a term that is a further -restriction, you may remove that term. If a license document contains -a further restriction but permits relicensing or conveying under this -License, you may add to a covered work material governed by the terms -of that license document, provided that the further restriction does -not survive such relicensing or conveying. - - If you add terms to a covered work in accord with this section, you -must place, in the relevant source files, a statement of the -additional terms that apply to those files, or a notice indicating -where to find the applicable terms. - - Additional terms, permissive or non-permissive, may be stated in the -form of a separately written license, or stated as exceptions; -the above requirements apply either way. - - 8. Termination. - - You may not propagate or modify a covered work except as expressly -provided under this License. Any attempt otherwise to propagate or -modify it is void, and will automatically terminate your rights under -this License (including any patent licenses granted under the third -paragraph of section 11). - - However, if you cease all violation of this License, then your -license from a particular copyright holder is reinstated (a) -provisionally, unless and until the copyright holder explicitly and -finally terminates your license, and (b) permanently, if the copyright -holder fails to notify you of the violation by some reasonable means -prior to 60 days after the cessation. - - Moreover, your license from a particular copyright holder is -reinstated permanently if the copyright holder notifies you of the -violation by some reasonable means, this is the first time you have -received notice of violation of this License (for any work) from that -copyright holder, and you cure the violation prior to 30 days after -your receipt of the notice. - - Termination of your rights under this section does not terminate the -licenses of parties who have received copies or rights from you under -this License. 
If your rights have been terminated and not permanently -reinstated, you do not qualify to receive new licenses for the same -material under section 10. - - 9. Acceptance Not Required for Having Copies. - - You are not required to accept this License in order to receive or -run a copy of the Program. Ancillary propagation of a covered work -occurring solely as a consequence of using peer-to-peer transmission -to receive a copy likewise does not require acceptance. However, -nothing other than this License grants you permission to propagate or -modify any covered work. These actions infringe copyright if you do -not accept this License. Therefore, by modifying or propagating a -covered work, you indicate your acceptance of this License to do so. - - 10. Automatic Licensing of Downstream Recipients. - - Each time you convey a covered work, the recipient automatically -receives a license from the original licensors, to run, modify and -propagate that work, subject to this License. You are not responsible -for enforcing compliance by third parties with this License. - - An "entity transaction" is a transaction transferring control of an -organization, or substantially all assets of one, or subdividing an -organization, or merging organizations. If propagation of a covered -work results from an entity transaction, each party to that -transaction who receives a copy of the work also receives whatever -licenses to the work the party's predecessor in interest had or could -give under the previous paragraph, plus a right to possession of the -Corresponding Source of the work from the predecessor in interest, if -the predecessor has it or can get it with reasonable efforts. - - You may not impose any further restrictions on the exercise of the -rights granted or affirmed under this License. For example, you may -not impose a license fee, royalty, or other charge for exercise of -rights granted under this License, and you may not initiate litigation -(including a cross-claim or counterclaim in a lawsuit) alleging that -any patent claim is infringed by making, using, selling, offering for -sale, or importing the Program or any portion of it. - - 11. Patents. - - A "contributor" is a copyright holder who authorizes use under this -License of the Program or a work on which the Program is based. The -work thus licensed is called the contributor's "contributor version". - - A contributor's "essential patent claims" are all patent claims -owned or controlled by the contributor, whether already acquired or -hereafter acquired, that would be infringed by some manner, permitted -by this License, of making, using, or selling its contributor version, -but do not include claims that would be infringed only as a -consequence of further modification of the contributor version. For -purposes of this definition, "control" includes the right to grant -patent sublicenses in a manner consistent with the requirements of -this License. - - Each contributor grants you a non-exclusive, worldwide, royalty-free -patent license under the contributor's essential patent claims, to -make, use, sell, offer for sale, import and otherwise run, modify and -propagate the contents of its contributor version. - - In the following three paragraphs, a "patent license" is any express -agreement or commitment, however denominated, not to enforce a patent -(such as an express permission to practice a patent or covenant not to -sue for patent infringement). 
To "grant" such a patent license to a -party means to make such an agreement or commitment not to enforce a -patent against the party. - - If you convey a covered work, knowingly relying on a patent license, -and the Corresponding Source of the work is not available for anyone -to copy, free of charge and under the terms of this License, through a -publicly available network server or other readily accessible means, -then you must either (1) cause the Corresponding Source to be so -available, or (2) arrange to deprive yourself of the benefit of the -patent license for this particular work, or (3) arrange, in a manner -consistent with the requirements of this License, to extend the patent -license to downstream recipients. "Knowingly relying" means you have -actual knowledge that, but for the patent license, your conveying the -covered work in a country, or your recipient's use of the covered work -in a country, would infringe one or more identifiable patents in that -country that you have reason to believe are valid. - - If, pursuant to or in connection with a single transaction or -arrangement, you convey, or propagate by procuring conveyance of, a -covered work, and grant a patent license to some of the parties -receiving the covered work authorizing them to use, propagate, modify -or convey a specific copy of the covered work, then the patent license -you grant is automatically extended to all recipients of the covered -work and works based on it. - - A patent license is "discriminatory" if it does not include within -the scope of its coverage, prohibits the exercise of, or is -conditioned on the non-exercise of one or more of the rights that are -specifically granted under this License. You may not convey a covered -work if you are a party to an arrangement with a third party that is -in the business of distributing software, under which you make payment -to the third party based on the extent of your activity of conveying -the work, and under which the third party grants, to any of the -parties who would receive the covered work from you, a discriminatory -patent license (a) in connection with copies of the covered work -conveyed by you (or copies made from those copies), or (b) primarily -for and in connection with specific products or compilations that -contain the covered work, unless you entered into that arrangement, -or that patent license was granted, prior to 28 March 2007. - - Nothing in this License shall be construed as excluding or limiting -any implied license or other defenses to infringement that may -otherwise be available to you under applicable patent law. - - 12. No Surrender of Others' Freedom. - - If conditions are imposed on you (whether by court order, agreement or -otherwise) that contradict the conditions of this License, they do not -excuse you from the conditions of this License. If you cannot convey a -covered work so as to satisfy simultaneously your obligations under this -License and any other pertinent obligations, then as a consequence you may -not convey it at all. For example, if you agree to terms that obligate you -to collect a royalty for further conveying from those to whom you convey -the Program, the only way you could satisfy both those terms and this -License would be to refrain entirely from conveying the Program. - - 13. Use with the GNU Affero General Public License. 
- - Notwithstanding any other provision of this License, you have -permission to link or combine any covered work with a work licensed -under version 3 of the GNU Affero General Public License into a single -combined work, and to convey the resulting work. The terms of this -License will continue to apply to the part which is the covered work, -but the special requirements of the GNU Affero General Public License, -section 13, concerning interaction through a network will apply to the -combination as such. - - 14. Revised Versions of this License. - - The Free Software Foundation may publish revised and/or new versions of -the GNU General Public License from time to time. Such new versions will -be similar in spirit to the present version, but may differ in detail to -address new problems or concerns. - - Each version is given a distinguishing version number. If the -Program specifies that a certain numbered version of the GNU General -Public License "or any later version" applies to it, you have the -option of following the terms and conditions either of that numbered -version or of any later version published by the Free Software -Foundation. If the Program does not specify a version number of the -GNU General Public License, you may choose any version ever published -by the Free Software Foundation. - - If the Program specifies that a proxy can decide which future -versions of the GNU General Public License can be used, that proxy's -public statement of acceptance of a version permanently authorizes you -to choose that version for the Program. - - Later license versions may give you additional or different -permissions. However, no additional obligations are imposed on any -author or copyright holder as a result of your choosing to follow a -later version. - - 15. Disclaimer of Warranty. - - THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY -APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT -HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY -OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, -THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR -PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM -IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF -ALL NECESSARY SERVICING, REPAIR OR CORRECTION. - - 16. Limitation of Liability. - - IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING -WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS -THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY -GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE -USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF -DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD -PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), -EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF -SUCH DAMAGES. - - 17. Interpretation of Sections 15 and 16. - - If the disclaimer of warranty and limitation of liability provided -above cannot be given local legal effect according to their terms, -reviewing courts shall apply local law that most closely approximates -an absolute waiver of all civil liability in connection with the -Program, unless a warranty or assumption of liability accompanies a -copy of the Program in return for a fee. 
- - END OF TERMS AND CONDITIONS - - How to Apply These Terms to Your New Programs - - If you develop a new program, and you want it to be of the greatest -possible use to the public, the best way to achieve this is to make it -free software which everyone can redistribute and change under these terms. - - To do so, attach the following notices to the program. It is safest -to attach them to the start of each source file to most effectively -state the exclusion of warranty; and each file should have at least -the "copyright" line and a pointer to where the full notice is found. - - - Copyright (C) - - This program is free software: you can redistribute it and/or modify - it under the terms of the GNU General Public License as published by - the Free Software Foundation, either version 3 of the License, or - (at your option) any later version. - - This program is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - GNU General Public License for more details. - - You should have received a copy of the GNU General Public License - along with this program. If not, see . - -Also add information on how to contact you by electronic and paper mail. - - If the program does terminal interaction, make it output a short -notice like this when it starts in an interactive mode: - - Copyright (C) - This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'. - This is free software, and you are welcome to redistribute it - under certain conditions; type `show c' for details. - -The hypothetical commands `show w' and `show c' should show the appropriate -parts of the General Public License. Of course, your program's commands -might be different; for a GUI interface, you would use an "about box". - - You should also get your employer (if you work as a programmer) or school, -if any, to sign a "copyright disclaimer" for the program, if necessary. -For more information on this, and how to apply and follow the GNU GPL, see -. - - The GNU General Public License does not permit incorporating your program -into proprietary programs. If your program is a subroutine library, you -may consider it more useful to permit linking proprietary applications with -the library. If this is what you want to do, use the GNU Lesser General -Public License instead of this License. But first, please read -. +MIT License + +Copyright (c) 2023 RMIT University + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
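
Note on the renv workflow for the patch below: it moves renv.lock to R 4.2.1
and the Posit Package Manager repository, so a collaborator pulling the change
would re-sync their project library from the updated lockfile. A minimal
sketch, assuming the project has already been bootstrapped by renv/activate.R
and is run from the repository root (this is renv's standard API, not part of
the patches themselves):

    # report packages whose installed versions differ from renv.lock
    renv::status()

    # install the locked versions recorded in renv.lock;
    # prompt = FALSE skips the interactive confirmation
    renv::restore(prompt = FALSE)
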
From ebd3e339b21cf8757b9066010af8d426e3b10789 Mon Sep 17 00:00:00 2001 From: jafshin Date: Mon, 29 Jan 2024 15:31:32 +1100 Subject: [PATCH 050/103] updating renv packages and using tidverse instead of individual packages --- NetworkGenerator.R | 7 +- renv.lock | 323 +++++++++++++++------------------------------ renv/activate.R | 27 +--- renv/settings.dcf | 10 -- renv/settings.json | 19 +++ 5 files changed, 128 insertions(+), 258 deletions(-) delete mode 100644 renv/settings.dcf create mode 100644 renv/settings.json diff --git a/NetworkGenerator.R b/NetworkGenerator.R index 37d52ca..7c73abd 100644 --- a/NetworkGenerator.R +++ b/NetworkGenerator.R @@ -115,21 +115,16 @@ makeNetwork<-function(city, outputFileName = "test"){ library(sf) library(fs) - library(dplyr) - library(tidyr) + library(tidyverse) library(data.table) - library(stringr) library(igraph) library(raster) library(terra) - library(rgdal) - library(purrr) library(lwgeom) library(tidytransit) library(hablar) library(hms) library(osmextract) - library(tidyr) library(doSNOW) library(parallel) library(foreach) diff --git a/renv.lock b/renv.lock index 5b8047f..fb5c6aa 100644 --- a/renv.lock +++ b/renv.lock @@ -1,24 +1,24 @@ { "R": { - "Version": "4.1.2", + "Version": "4.2.1", "Repositories": [ { "Name": "CRAN", - "URL": "https://cloud.r-project.org" + "URL": "https://packagemanager.posit.co/cran/latest" } ] }, "Packages": { "DBI": { "Package": "DBI", - "Version": "1.1.3", + "Version": "1.2.1", "Source": "Repository", "Repository": "CRAN", "Requirements": [ "R", "methods" ], - "Hash": "b2866e62bab9378c3cc9476a1954226b" + "Hash": "9b4993e98e0e19da84c168460c032fef" }, "KernSmooth": { "Package": "KernSmooth", @@ -33,35 +33,18 @@ }, "MASS": { "Package": "MASS", - "Version": "7.3-60", - "Source": "Repository", - "Repository": "CRAN", - "Requirements": [ - "R", - "grDevices", - "graphics", - "methods", - "stats", - "utils" - ], - "Hash": "a56a6365b3fa73293ea8d084be0d9bb0" - }, - "Matrix": { - "Package": "Matrix", - "Version": "1.6-1", + "Version": "7.3-60.0.1", "Source": "Repository", "Repository": "CRAN", "Requirements": [ "R", "grDevices", "graphics", - "grid", - "lattice", "methods", "stats", "utils" ], - "Hash": "cb6855ac711958ca734b75e631b2035d" + "Hash": "b765b28387acc8ec9e9c1530713cb19c" }, "R6": { "Package": "R6", @@ -75,14 +58,14 @@ }, "Rcpp": { "Package": "Rcpp", - "Version": "1.0.11", + "Version": "1.0.12", "Source": "Repository", "Repository": "CRAN", "Requirements": [ "methods", "utils" ], - "Hash": "ae6cbbe1492f4de79c45fce06f967ce8" + "Hash": "5ea2700d21e038ace58269ecdbeb9ec0" }, "askpass": { "Package": "askpass", @@ -125,60 +108,62 @@ }, "cli": { "Package": "cli", - "Version": "3.6.1", + "Version": "3.6.2", "Source": "Repository", "Repository": "CRAN", "Requirements": [ "R", "utils" ], - "Hash": "89e6d8219950eac806ae0c489052048a" + "Hash": "1216ac65ac55ec0058a6f75d7ca0fd52" }, - "cpp11": { - "Package": "cpp11", - "Version": "0.4.6", + "codetools": { + "Package": "codetools", + "Version": "0.2-19", "Source": "Repository", "Repository": "CRAN", "Requirements": [ "R" ], - "Hash": "707fae4bbf73697ec8d85f9d7076c061" + "Hash": "c089a619a7fae175d149d89164f8c7d8" }, - "curl": { - "Package": "curl", - "Version": "5.0.2", + "cpp11": { + "Package": "cpp11", + "Version": "0.4.7", "Source": "Repository", "Repository": "CRAN", "Requirements": [ "R" ], - "Hash": "511bacbfa153a15251166b463b4da4f9" + "Hash": "5a295d7d963cc5035284dcdbaf334f4e" }, - "data.table": { - "Package": "data.table", - "Version": "1.14.8", + "curl": { + "Package": 
"curl", + "Version": "5.2.0", "Source": "Repository", "Repository": "CRAN", "Requirements": [ - "R", - "methods" + "R" ], - "Hash": "b4c06e554f33344e044ccd7fdca750a9" + "Hash": "ce88d13c0b10fe88a37d9c59dba2d7f9" }, - "digest": { - "Package": "digest", - "Version": "0.6.33", + "doSNOW": { + "Package": "doSNOW", + "Version": "1.0.20", "Source": "Repository", "Repository": "CRAN", "Requirements": [ "R", + "foreach", + "iterators", + "snow", "utils" ], - "Hash": "b18a9cf3c003977b0cc49d5e76ebe48d" + "Hash": "4bf4423d2c8b68c93b47e32ec554c2f1" }, "dplyr": { "Package": "dplyr", - "Version": "1.1.3", + "Version": "1.1.4", "Source": "Repository", "Repository": "CRAN", "Requirements": [ @@ -197,11 +182,11 @@ "utils", "vctrs" ], - "Hash": "e85ffbebaad5f70e1a2e2ef4302b4949" + "Hash": "fedd9d00c2944ff00a0e2696ccf048ec" }, "e1071": { "Package": "e1071", - "Version": "1.7-13", + "Version": "1.7-14", "Source": "Repository", "Repository": "CRAN", "Requirements": [ @@ -213,11 +198,11 @@ "stats", "utils" ], - "Hash": "1046cb48d06cb40c2900d8878f03a0fe" + "Hash": "4ef372b716824753719a8a38b258442d" }, "fansi": { "Package": "fansi", - "Version": "1.0.4", + "Version": "1.0.6", "Source": "Repository", "Repository": "CRAN", "Requirements": [ @@ -225,7 +210,20 @@ "grDevices", "utils" ], - "Hash": "1d9e7ad3c8312a192dea7d3db0274fde" + "Hash": "962174cf2aeb5b9eea581522286a911f" + }, + "foreach": { + "Package": "foreach", + "Version": "1.5.2", + "Source": "Repository", + "Repository": "CRAN", + "Requirements": [ + "R", + "codetools", + "iterators", + "utils" + ], + "Hash": "618609b42c9406731ead03adf5379850" }, "fs": { "Package": "fs", @@ -249,61 +247,16 @@ ], "Hash": "15e9634c0fcd294799e9b2e929ed1b86" }, - "geodist": { - "Package": "geodist", - "Version": "0.0.8", - "Source": "Repository", - "Repository": "CRAN", - "Hash": "be85dc7cec76dbd9a9478724ba518146" - }, "glue": { "Package": "glue", - "Version": "1.6.2", + "Version": "1.7.0", "Source": "Repository", "Repository": "CRAN", "Requirements": [ "R", "methods" ], - "Hash": "4f2596dfb05dac67b9dc558e5c6fba2e" - }, - "gtfsio": { - "Package": "gtfsio", - "Version": "1.1.0", - "Source": "Repository", - "Repository": "CRAN", - "Requirements": [ - "data.table", - "utils", - "zip" - ], - "Hash": "d02f8456a746098e0456cfd98db473c5" - }, - "hablar": { - "Package": "hablar", - "Version": "0.3.2", - "Source": "Repository", - "Repository": "CRAN", - "Requirements": [ - "dplyr", - "lubridate", - "purrr" - ], - "Hash": "c7a6a49207405553fd26eff865d94360" - }, - "hms": { - "Package": "hms", - "Version": "1.1.3", - "Source": "Repository", - "Repository": "CRAN", - "Requirements": [ - "lifecycle", - "methods", - "pkgconfig", - "rlang", - "vctrs" - ], - "Hash": "b59377caa7ed00fa41808342002138f9" + "Hash": "e0b3a53876554bd45879e596cdb10a52" }, "httr": { "Package": "httr", @@ -320,41 +273,30 @@ ], "Hash": "ac107251d9d9fd72f0ca8049988f1d7f" }, - "igraph": { - "Package": "igraph", - "Version": "1.5.1", + "iterators": { + "Package": "iterators", + "Version": "1.0.14", "Source": "Repository", "Repository": "CRAN", "Requirements": [ - "Matrix", "R", - "cli", - "cpp11", - "grDevices", - "graphics", - "lifecycle", - "magrittr", - "methods", - "pkgconfig", - "rlang", - "stats", "utils" ], - "Hash": "80401cb5ec513e8ddc56764d03f63669" + "Hash": "8954069286b4b2b0d023d1b288dce978" }, "jsonlite": { "Package": "jsonlite", - "Version": "1.8.7", + "Version": "1.8.8", "Source": "Repository", "Repository": "CRAN", "Requirements": [ "methods" ], - "Hash": "266a20443ca13c65688b2116d5220f76" + "Hash": 
"e1b9c55281c5adc4dd113652d9e26768" }, "lattice": { "Package": "lattice", - "Version": "0.21-8", + "Version": "0.22-5", "Source": "Repository", "Repository": "CRAN", "Requirements": [ @@ -365,11 +307,11 @@ "stats", "utils" ], - "Hash": "0b8a6d63c8770f02a8b5635f3c431e6b" + "Hash": "7c5e89f04e72d6611c77451f6331a091" }, "lifecycle": { "Package": "lifecycle", - "Version": "1.0.3", + "Version": "1.0.4", "Source": "Repository", "Repository": "CRAN", "Requirements": [ @@ -378,20 +320,7 @@ "glue", "rlang" ], - "Hash": "001cecbeac1cff9301bdc3775ee46a86" - }, - "lubridate": { - "Package": "lubridate", - "Version": "1.9.2", - "Source": "Repository", - "Repository": "CRAN", - "Requirements": [ - "R", - "generics", - "methods", - "timechange" - ], - "Hash": "e25f18436e3efd42c7c590a1c4c15390" + "Hash": "b8552d117e1b808b09a832f589b79035" }, "lwgeom": { "Package": "lwgeom", @@ -428,13 +357,13 @@ }, "openssl": { "Package": "openssl", - "Version": "2.1.0", + "Version": "2.1.1", "Source": "Repository", "Repository": "CRAN", "Requirements": [ "askpass" ], - "Hash": "273a6bb4a9844c296a459d2176673270" + "Hash": "2a0dc8c6adfb6f032e4d4af82d258ab5" }, "osmextract": { "Package": "osmextract", @@ -507,7 +436,7 @@ }, "raster": { "Package": "raster", - "Version": "3.6-23", + "Version": "3.6-26", "Source": "Repository", "Repository": "CRAN", "Requirements": [ @@ -517,48 +446,32 @@ "sp", "terra" ], - "Hash": "337d6d70f7d6bf78df236a5a53f09db0" + "Hash": "7d6eda494f34a644420ac1bfd2a8023a" }, "renv": { "Package": "renv", - "Version": "1.0.2", - "Source": "Repository", - "Repository": "CRAN", - "Requirements": [ - "utils" - ], - "Hash": "4b22ac016fe54028b88d0c68badbd061" - }, - "rgdal": { - "Package": "rgdal", - "Version": "1.6-7", + "Version": "1.0.3", "Source": "Repository", "Repository": "CRAN", "Requirements": [ - "R", - "grDevices", - "graphics", - "methods", - "sp", - "stats", "utils" ], - "Hash": "10b777236c9e7855bc9dea8e347e30b7" + "Hash": "41b847654f567341725473431dd0d5ab" }, "rlang": { "Package": "rlang", - "Version": "1.1.1", + "Version": "1.1.3", "Source": "Repository", "Repository": "CRAN", "Requirements": [ "R", "utils" ], - "Hash": "a85c767b55f0bf9b7ad16c6d7baee5bb" + "Hash": "42548638fae05fd9a9b5f3f437fbbbe2" }, "s2": { "Package": "s2", - "Version": "1.1.4", + "Version": "1.1.6", "Source": "Repository", "Repository": "CRAN", "Requirements": [ @@ -566,11 +479,11 @@ "Rcpp", "wk" ], - "Hash": "f1cbe03bb3346f8e817518ffa20f9f5a" + "Hash": "32f7b1a15bb01ae809022960abad5363" }, "sf": { "Package": "sf", - "Version": "1.0-14", + "Version": "1.0-15", "Source": "Repository", "Repository": "CRAN", "Requirements": [ @@ -589,11 +502,22 @@ "units", "utils" ], - "Hash": "e2111252a76984ca50bf8d6314348681" + "Hash": "f432b3379fb1a47046e253468b6b6b6d" + }, + "snow": { + "Package": "snow", + "Version": "0.4-4", + "Source": "Repository", + "Repository": "CRAN", + "Requirements": [ + "R", + "utils" + ], + "Hash": "40b74690debd20c57d93d8c246b305d4" }, "sp": { "Package": "sp", - "Version": "2.0-0", + "Version": "2.1-2", "Source": "Repository", "Repository": "CRAN", "Requirements": [ @@ -606,11 +530,11 @@ "stats", "utils" ], - "Hash": "2551981e6f85d59c81652bf654d6c3ca" + "Hash": "40a9887191d33b2521a1d741f8c8aea2" }, "stringi": { "Package": "stringi", - "Version": "1.7.12", + "Version": "1.8.3", "Source": "Repository", "Repository": "CRAN", "Requirements": [ @@ -619,11 +543,11 @@ "tools", "utils" ], - "Hash": "ca8bd84263c77310739d2cf64d84d7c9" + "Hash": "058aebddea264f4c99401515182e656a" }, "stringr": { "Package": "stringr", - 
"Version": "1.5.0", + "Version": "1.5.1", "Source": "Repository", "Repository": "CRAN", "Requirements": [ @@ -636,7 +560,7 @@ "stringi", "vctrs" ], - "Hash": "671a4d384ae9d32fc47a14e98bfa3dc8" + "Hash": "960e2ae9e09656611e0b8214ad543207" }, "sys": { "Package": "sys", @@ -647,7 +571,7 @@ }, "terra": { "Package": "terra", - "Version": "1.7-46", + "Version": "1.7-65", "Source": "Repository", "Repository": "CRAN", "Requirements": [ @@ -655,7 +579,7 @@ "Rcpp", "methods" ], - "Hash": "d15182a8a20ffc5880e721bfa1bf4ce9" + "Hash": "8e245fd4eab07bf55ddb2e6ea353c0e1" }, "tibble": { "Package": "tibble", @@ -678,7 +602,7 @@ }, "tidyr": { "Package": "tidyr", - "Version": "1.3.0", + "Version": "1.3.1", "Source": "Repository", "Repository": "CRAN", "Requirements": [ @@ -697,7 +621,7 @@ "utils", "vctrs" ], - "Hash": "e47debdc7ce599b070c8e78e8ac0cfcf" + "Hash": "915fb7ce036c22a6a33b5a8adb712eb1" }, "tidyselect": { "Package": "tidyselect", @@ -715,59 +639,30 @@ ], "Hash": "79540e5fcd9e0435af547d885f184fd5" }, - "tidytransit": { - "Package": "tidytransit", - "Version": "1.6.0", - "Source": "Repository", - "Repository": "CRAN", - "Requirements": [ - "R", - "data.table", - "digest", - "dplyr", - "geodist", - "gtfsio", - "hms", - "rlang", - "sf" - ], - "Hash": "a9255650349cc3438c03020e509c1074" - }, - "timechange": { - "Package": "timechange", - "Version": "0.2.0", - "Source": "Repository", - "Repository": "CRAN", - "Requirements": [ - "R", - "cpp11" - ], - "Hash": "8548b44f79a35ba1791308b61e6012d7" - }, "units": { "Package": "units", - "Version": "0.8-3", + "Version": "0.8-5", "Source": "Repository", "Repository": "CRAN", "Requirements": [ "R", "Rcpp" ], - "Hash": "880ebc99e4d8f7e5f3caeb2f12632583" + "Hash": "119d19da480e873f72241ff6962ffd83" }, "utf8": { "Package": "utf8", - "Version": "1.2.3", + "Version": "1.2.4", "Source": "Repository", "Repository": "CRAN", "Requirements": [ "R" ], - "Hash": "1fe17157424bb09c48a8b3b550c753bc" + "Hash": "62b65c52671e6665f803ff02954446e9" }, "vctrs": { "Package": "vctrs", - "Version": "0.6.3", + "Version": "0.6.5", "Source": "Repository", "Repository": "CRAN", "Requirements": [ @@ -777,37 +672,29 @@ "lifecycle", "rlang" ], - "Hash": "d0ef2856b83dc33ea6e255caf6229ee2" + "Hash": "c03fa420630029418f7e6da3667aac4a" }, "withr": { "Package": "withr", - "Version": "2.5.0", + "Version": "3.0.0", "Source": "Repository", "Repository": "CRAN", "Requirements": [ "R", "grDevices", - "graphics", - "stats" + "graphics" ], - "Hash": "c0e49a9760983e81e55cdd9be92e7182" + "Hash": "d31b6c62c10dcf11ec530ca6b0dd5d35" }, "wk": { "Package": "wk", - "Version": "0.8.0", + "Version": "0.9.1", "Source": "Repository", "Repository": "CRAN", "Requirements": [ "R" ], - "Hash": "aaf7e20556e3125a09d53453814ad339" - }, - "zip": { - "Package": "zip", - "Version": "2.3.0", - "Source": "Repository", - "Repository": "CRAN", - "Hash": "d98c94dacb7e0efcf83b0a133a705504" + "Hash": "5d4545e140e36476f35f20d0ca87963e" } } } diff --git a/renv/activate.R b/renv/activate.R index 2969c73..cb5401f 100644 --- a/renv/activate.R +++ b/renv/activate.R @@ -2,7 +2,7 @@ local({ # the requested version of renv - version <- "1.0.2" + version <- "1.0.3" attr(version, "sha") <- NULL # the project directory @@ -1034,19 +1034,6 @@ local({ } - - renv_bootstrap_in_rstudio <- function() { - commandArgs()[[1]] == "RStudio" - } - - # Used to work around buglet in RStudio if hook uses readline - renv_bootstrap_flush_console <- function() { - tryCatch({ - tools <- as.environment("tools:rstudio") - tools$.rs.api.sendToConsole("", echo = FALSE, 
focus = FALSE) - }, error = function(cnd) {}) - } - renv_json_read <- function(file = NULL, text = NULL) { jlerr <- NULL @@ -1185,16 +1172,8 @@ local({ # construct full libpath libpath <- file.path(root, prefix) - if (renv_bootstrap_in_rstudio()) { - # RStudio only updates console once .Rprofile is finished, so - # instead run code on sessionInit - setHook("rstudio.sessionInit", function(...) { - renv_bootstrap_exec(project, libpath, version) - renv_bootstrap_flush_console() - }) - } else { - renv_bootstrap_exec(project, libpath, version) - } + # run bootstrap code + renv_bootstrap_exec(project, libpath, version) invisible() diff --git a/renv/settings.dcf b/renv/settings.dcf deleted file mode 100644 index 169d82f..0000000 --- a/renv/settings.dcf +++ /dev/null @@ -1,10 +0,0 @@ -bioconductor.version: -external.libraries: -ignored.packages: -package.dependency.fields: Imports, Depends, LinkingTo -r.version: -snapshot.type: implicit -use.cache: TRUE -vcs.ignore.cellar: TRUE -vcs.ignore.library: TRUE -vcs.ignore.local: TRUE diff --git a/renv/settings.json b/renv/settings.json new file mode 100644 index 0000000..ffdbb32 --- /dev/null +++ b/renv/settings.json @@ -0,0 +1,19 @@ +{ + "bioconductor.version": null, + "external.libraries": [], + "ignored.packages": [], + "package.dependency.fields": [ + "Imports", + "Depends", + "LinkingTo" + ], + "ppm.enabled": null, + "ppm.ignored.urls": [], + "r.version": null, + "snapshot.type": "implicit", + "use.cache": true, + "vcs.ignore.cellar": true, + "vcs.ignore.library": true, + "vcs.ignore.local": true, + "vcs.manage.ignores": true +} From 7d3202865258e1f983aeadbfdbec365f091aff80 Mon Sep 17 00:00:00 2001 From: jafshin Date: Mon, 29 Jan 2024 15:53:32 +1100 Subject: [PATCH 051/103] data folder is in the main dir --- NetworkGenerator.R | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/NetworkGenerator.R b/NetworkGenerator.R index 7c73abd..2c24807 100644 --- a/NetworkGenerator.R +++ b/NetworkGenerator.R @@ -29,26 +29,26 @@ makeNetwork<-function(city, outputFileName = "test"){ # GTFS data (and, if 'addGtfs=T', also set start and end dates in GTFS section) if (city == "Bendigo") { - region = "../data/processed/greater_bendigo.sqlite" + region = "./data/processed/greater_bendigo.sqlite" outputCrs = 7899 - osmGpkg = "../data/processed/bendigo_osm.gpkg" - unconfiguredSqlite = "../data/processed/bendigo_network_unconfigured.sqlite" + osmGpkg = "./data/processed/bendigo_osm.gpkg" + unconfiguredSqlite = "./data/processed/bendigo_network_unconfigured.sqlite" cropAreaPoly = "" # must set 'crop2Area=F' # demFile = "./data/5m_DEM_reprojected.tif" # MIGHT NOT BE FINAL FILE # osmPbfExtract = "./data/brisbane_australia.osm.pbf" # ndviFile = "" # must set 'addNDVI=F' - gtfs_feed = "../data/processed/gtfs.zip" + gtfs_feed = "./data/processed/gtfs.zip" } else if (city == "Melbourne") { - region = "../data/processed/greater_melbourne.sqlite" + region = "./data/processed/greater_melbourne.sqlite" outputCrs = 7899 - osmGpkg = "../data/processed/melbourne_osm.gpkg" + osmGpkg = "./data/processed/melbourne_osm.gpkg" unconfiguredSqlite = "../data/processed/melbourne_network_unconfigured.sqlite" cropAreaPoly = "city-of-melbourne_victoria" # demFile = "./data/DEM_melbourne.tif" # osmPbfExtract = "./data/melbourne_australia.osm.pbf" # ndviFile = "./data/NDVI_1600mBuffer_Melbourne_reprojected.tif" - gtfs_feed = "../data/processed/gtfs.zip" + gtfs_feed = "./data/processed/gtfs.zip" } else { echo(paste("City parameters for", city, "have not been set; unable 
to proceed\n"))

From ee28f7dd3145a9df5bfb01e747860cbc9c720dfb Mon Sep 17 00:00:00 2001
From: jafshin
Date: Mon, 29 Jan 2024 15:55:15 +1100
Subject: [PATCH 052/103] adding retainDownload to the function call - it is a
 local variable and must be passed in

---
 NetworkGenerator.R | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/NetworkGenerator.R b/NetworkGenerator.R
index 2c24807..e347025 100644
--- a/NetworkGenerator.R
+++ b/NetworkGenerator.R
@@ -164,7 +164,7 @@ makeNetwork<-function(city, outputFileName = "test"){
   # Downloading OSM
   if (downloadOsm) {
     echo(paste0("Downloading OSM extract for ", city, "\n"))
-    getOsmExtract(region, outputCrs, regionBufferDist, osmGpkg)
+    getOsmExtract(region, outputCrs, regionBufferDist, osmGpkg, retainDownload)
   }
 
   # Processing OSM, or loading existing layers if not required

From 7c61b00f6a6f5cdb8d24b127c6422dc342670938 Mon Sep 17 00:00:00 2001
From: jafshin
Date: Mon, 29 Jan 2024 16:01:42 +1100
Subject: [PATCH 053/103] adding retainDownload to the function definition too

---
 functions/getOsmExtract.R | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/functions/getOsmExtract.R b/functions/getOsmExtract.R
index b22c1e4..e8effa7 100644
--- a/functions/getOsmExtract.R
+++ b/functions/getOsmExtract.R
@@ -3,7 +3,8 @@
 getOsmExtract <- function(region,
                           outputCrs,
                           regionBufferDist = 10000,
-                          osmGpkg) {
+                          osmGpkg,
+                          retainDownload) {
 
   # region = "../data/processed/greater_bendigo.sqlite"
   # outputCrs = 7899

From a19f6f807ff43dcdd754061e40fbe408a91afca0 Mon Sep 17 00:00:00 2001
From: jafshin
Date: Mon, 29 Jan 2024 16:18:43 +1100
Subject: [PATCH 054/103] making building from OSM the default

---
 NetworkGenerator.R | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/NetworkGenerator.R b/NetworkGenerator.R
index e347025..b886c1f 100644
--- a/NetworkGenerator.R
+++ b/NetworkGenerator.R
@@ -58,7 +58,7 @@ makeNetwork<-function(city, outputFileName = "test"){
   # DOWNLOAD OSM EXTRACT
   # A flag for whether to download osm extract for the region (if not, and if
   # network needs to be processed, then must already have osmGpkg file)
-  downloadOsm=F
+  downloadOsm=T
   regionBufferDist=10000 # Distance to buffer region when getting osm extract
   retainDownload=F # Whether to retain downloaded file after region extracted

From e72951c953e221e677f2401d993b30c54a11a7da Mon Sep 17 00:00:00 2001
From: jafshin
Date: Mon, 29 Jan 2024 16:18:56 +1100
Subject: [PATCH 055/103] updating the renv lockfile

---
 renv.lock | 1222 ++++++++++++++++++++++++++++++++++++++++++++++++++++-
 1 file changed, 1215 insertions(+), 7 deletions(-)

diff --git a/renv.lock b/renv.lock
index fb5c6aa..ce9332e 100644
--- a/renv.lock
+++ b/renv.lock
@@ -9,6 +9,13 @@
     ]
   },
   "Packages": {
+    "BH": {
+      "Package": "BH",
+      "Version": "1.84.0-0",
+      "Source": "Repository",
+      "Repository": "RSPM",
+      "Hash": "a8235afbcd6316e6e91433ea47661013"
+    },
     "DBI": {
       "Package": "DBI",
       "Version": "1.2.1",
       "Source": "Repository",
       "Repository": "CRAN",
       "Requirements": [
         "R",
         "methods"
       ],
       "Hash": "9b4993e98e0e19da84c168460c032fef"
     },
     "KernSmooth": {
       "Package": "KernSmooth",
       "Version": "2.23-22",
       "Source": "Repository",
       "Repository": "CRAN",
       "Requirements": [
         "R",
         "stats"
       ],
       "Hash": "2fecebc3047322fa5930f74fae5de70f"
     },
     "MASS": {
       "Package": "MASS",
       "Version": "7.3-60.0.1",
       "Source": "Repository",
       "Repository": "CRAN",
       "Requirements": [
         "R",
         "grDevices",
         "graphics",
         "methods",
         "stats",
         "utils"
       ],
       "Hash": "b765b28387acc8ec9e9c1530713cb19c"
     },
     "Matrix": {
       "Package": "Matrix",
       "Version": "1.6-5",
       "Source": "Repository",
       "Repository": "CRAN",
       "Requirements": [
         "R",
         "grDevices",
         "graphics",
         "grid",
         "lattice",
         "methods",
         "stats",
         "utils"
       ],
       "Hash": "8c7115cd3a0e048bda2a7cd110549f7a"
     },
     "R6": {
       "Package": "R6",
       "Version": "2.5.1",
       "Source": "Repository",
       "Repository": "CRAN",
       "Requirements": [
         "R"
       ],
       "Hash": "470851b6d5d0ac559e9d01bb352b4021"
     },
     "RColorBrewer": {
       "Package": "RColorBrewer",
       "Version": "1.1-3",
       "Source": "Repository",
       "Repository": "CRAN",
       "Requirements": [
         "R"
       ],
       "Hash":
"45f0398006e83a5b10b72a90663d8d8c" + }, "Rcpp": { "Package": "Rcpp", "Version": "1.0.12", @@ -67,6 +101,19 @@ ], "Hash": "5ea2700d21e038ace58269ecdbeb9ec0" }, + "RcppEigen": { + "Package": "RcppEigen", + "Version": "0.3.3.9.4", + "Source": "Repository", + "Repository": "RSPM", + "Requirements": [ + "R", + "Rcpp", + "stats", + "utils" + ], + "Hash": "acb0a5bf38490f26ab8661b467f4f53a" + }, "askpass": { "Package": "askpass", "Version": "1.2.0", @@ -77,6 +124,140 @@ ], "Hash": "cad6cf7f1d5f6e906700b9d3e718c796" }, + "backports": { + "Package": "backports", + "Version": "1.4.1", + "Source": "Repository", + "Repository": "CRAN", + "Requirements": [ + "R" + ], + "Hash": "c39fbec8a30d23e721980b8afb31984c" + }, + "base64enc": { + "Package": "base64enc", + "Version": "0.1-3", + "Source": "Repository", + "Repository": "CRAN", + "Requirements": [ + "R" + ], + "Hash": "543776ae6848fde2f48ff3816d0628bc" + }, + "bit": { + "Package": "bit", + "Version": "4.0.5", + "Source": "Repository", + "Repository": "CRAN", + "Requirements": [ + "R" + ], + "Hash": "d242abec29412ce988848d0294b208fd" + }, + "bit64": { + "Package": "bit64", + "Version": "4.0.5", + "Source": "Repository", + "Repository": "CRAN", + "Requirements": [ + "R", + "bit", + "methods", + "stats", + "utils" + ], + "Hash": "9fe98599ca456d6552421db0d6772d8f" + }, + "blob": { + "Package": "blob", + "Version": "1.2.4", + "Source": "Repository", + "Repository": "CRAN", + "Requirements": [ + "methods", + "rlang", + "vctrs" + ], + "Hash": "40415719b5a479b87949f3aa0aee737c" + }, + "broom": { + "Package": "broom", + "Version": "1.0.5", + "Source": "Repository", + "Repository": "CRAN", + "Requirements": [ + "R", + "backports", + "dplyr", + "ellipsis", + "generics", + "glue", + "lifecycle", + "purrr", + "rlang", + "stringr", + "tibble", + "tidyr" + ], + "Hash": "fd25391c3c4f6ecf0fa95f1e6d15378c" + }, + "bslib": { + "Package": "bslib", + "Version": "0.6.1", + "Source": "Repository", + "Repository": "CRAN", + "Requirements": [ + "R", + "base64enc", + "cachem", + "grDevices", + "htmltools", + "jquerylib", + "jsonlite", + "lifecycle", + "memoise", + "mime", + "rlang", + "sass" + ], + "Hash": "c0d8599494bc7fb408cd206bbdd9cab0" + }, + "cachem": { + "Package": "cachem", + "Version": "1.0.8", + "Source": "Repository", + "Repository": "CRAN", + "Requirements": [ + "fastmap", + "rlang" + ], + "Hash": "c35768291560ce302c0a6589f92e837d" + }, + "callr": { + "Package": "callr", + "Version": "3.7.3", + "Source": "Repository", + "Repository": "CRAN", + "Requirements": [ + "R", + "R6", + "processx", + "utils" + ], + "Hash": "9b2191ede20fa29828139b9900922e51" + }, + "cellranger": { + "Package": "cellranger", + "Version": "1.1.0", + "Source": "Repository", + "Repository": "CRAN", + "Requirements": [ + "R", + "rematch", + "tibble" + ], + "Hash": "f61dbaec772ccd2e17705c1e872e9e7c" + }, "class": { "Package": "class", "Version": "7.3-22", @@ -117,6 +298,16 @@ ], "Hash": "1216ac65ac55ec0058a6f75d7ca0fd52" }, + "clipr": { + "Package": "clipr", + "Version": "0.8.0", + "Source": "Repository", + "Repository": "CRAN", + "Requirements": [ + "utils" + ], + "Hash": "3f038e5ac7f41d4ac41ce658c85e3042" + }, "codetools": { "Package": "codetools", "Version": "0.2-19", @@ -127,6 +318,33 @@ ], "Hash": "c089a619a7fae175d149d89164f8c7d8" }, + "colorspace": { + "Package": "colorspace", + "Version": "2.1-0", + "Source": "Repository", + "Repository": "CRAN", + "Requirements": [ + "R", + "grDevices", + "graphics", + "methods", + "stats" + ], + "Hash": "f20c47fd52fae58b4e377c37bb8c335b" + }, + 
"conflicted": { + "Package": "conflicted", + "Version": "1.2.0", + "Source": "Repository", + "Repository": "CRAN", + "Requirements": [ + "R", + "cli", + "memoise", + "rlang" + ], + "Hash": "bb097fccb22d156624fd07cd2894ddb6" + }, "cpp11": { "Package": "cpp11", "Version": "0.4.7", @@ -137,6 +355,18 @@ ], "Hash": "5a295d7d963cc5035284dcdbaf334f4e" }, + "crayon": { + "Package": "crayon", + "Version": "1.5.2", + "Source": "Repository", + "Repository": "CRAN", + "Requirements": [ + "grDevices", + "methods", + "utils" + ], + "Hash": "e8a1e41acf02548751f45c718d55aa6a" + }, "curl": { "Package": "curl", "Version": "5.2.0", @@ -147,6 +377,56 @@ ], "Hash": "ce88d13c0b10fe88a37d9c59dba2d7f9" }, + "data.table": { + "Package": "data.table", + "Version": "1.14.10", + "Source": "Repository", + "Repository": "CRAN", + "Requirements": [ + "R", + "methods" + ], + "Hash": "6ea17a32294d8ca00455825ab0cf71b9" + }, + "dbplyr": { + "Package": "dbplyr", + "Version": "2.4.0", + "Source": "Repository", + "Repository": "CRAN", + "Requirements": [ + "DBI", + "R", + "R6", + "blob", + "cli", + "dplyr", + "glue", + "lifecycle", + "magrittr", + "methods", + "pillar", + "purrr", + "rlang", + "tibble", + "tidyr", + "tidyselect", + "utils", + "vctrs", + "withr" + ], + "Hash": "59351f28a81f0742720b85363c4fdd61" + }, + "digest": { + "Package": "digest", + "Version": "0.6.34", + "Source": "Repository", + "Repository": "CRAN", + "Requirements": [ + "R", + "utils" + ], + "Hash": "7ede2ee9ea8d3edbf1ca84c1e333ad1a" + }, "doSNOW": { "Package": "doSNOW", "Version": "1.0.20", @@ -184,6 +464,25 @@ ], "Hash": "fedd9d00c2944ff00a0e2696ccf048ec" }, + "dtplyr": { + "Package": "dtplyr", + "Version": "1.3.1", + "Source": "Repository", + "Repository": "CRAN", + "Requirements": [ + "R", + "cli", + "data.table", + "dplyr", + "glue", + "lifecycle", + "rlang", + "tibble", + "tidyselect", + "vctrs" + ], + "Hash": "54ed3ea01b11e81a86544faaecfef8e2" + }, "e1071": { "Package": "e1071", "Version": "1.7-14", @@ -200,6 +499,28 @@ ], "Hash": "4ef372b716824753719a8a38b258442d" }, + "ellipsis": { + "Package": "ellipsis", + "Version": "0.3.2", + "Source": "Repository", + "Repository": "CRAN", + "Requirements": [ + "R", + "rlang" + ], + "Hash": "bb0eec2fe32e88d9e2836c2f73ea2077" + }, + "evaluate": { + "Package": "evaluate", + "Version": "0.23", + "Source": "Repository", + "Repository": "CRAN", + "Requirements": [ + "R", + "methods" + ], + "Hash": "daf4a1246be12c1fa8c7705a0935c1a0" + }, "fansi": { "Package": "fansi", "Version": "1.0.6", @@ -212,6 +533,48 @@ ], "Hash": "962174cf2aeb5b9eea581522286a911f" }, + "farver": { + "Package": "farver", + "Version": "2.1.1", + "Source": "Repository", + "Repository": "CRAN", + "Hash": "8106d78941f34855c440ddb946b8f7a5" + }, + "fastmap": { + "Package": "fastmap", + "Version": "1.1.1", + "Source": "Repository", + "Repository": "CRAN", + "Hash": "f7736a18de97dea803bde0a2daaafb27" + }, + "fontawesome": { + "Package": "fontawesome", + "Version": "0.5.2", + "Source": "Repository", + "Repository": "CRAN", + "Requirements": [ + "R", + "htmltools", + "rlang" + ], + "Hash": "c2efdd5f0bcd1ea861c2d4e2a883a67d" + }, + "forcats": { + "Package": "forcats", + "Version": "1.0.0", + "Source": "Repository", + "Repository": "CRAN", + "Requirements": [ + "R", + "cli", + "glue", + "lifecycle", + "magrittr", + "rlang", + "tibble" + ], + "Hash": "1a0a9a3d5083d0d573c4214576f1e690" + }, "foreach": { "Package": "foreach", "Version": "1.5.2", @@ -236,6 +599,28 @@ ], "Hash": "47b5f30c720c23999b913a1a635cf0bb" }, + "gargle": { + "Package": "gargle", + 
"Version": "1.5.2", + "Source": "Repository", + "Repository": "CRAN", + "Requirements": [ + "R", + "cli", + "fs", + "glue", + "httr", + "jsonlite", + "lifecycle", + "openssl", + "rappdirs", + "rlang", + "stats", + "utils", + "withr" + ], + "Hash": "fc0b272e5847c58cd5da9b20eedbd026" + }, "generics": { "Package": "generics", "Version": "0.1.3", @@ -247,6 +632,38 @@ ], "Hash": "15e9634c0fcd294799e9b2e929ed1b86" }, + "geodist": { + "Package": "geodist", + "Version": "0.0.8", + "Source": "Repository", + "Repository": "CRAN", + "Hash": "be85dc7cec76dbd9a9478724ba518146" + }, + "ggplot2": { + "Package": "ggplot2", + "Version": "3.4.4", + "Source": "Repository", + "Repository": "CRAN", + "Requirements": [ + "MASS", + "R", + "cli", + "glue", + "grDevices", + "grid", + "gtable", + "isoband", + "lifecycle", + "mgcv", + "rlang", + "scales", + "stats", + "tibble", + "vctrs", + "withr" + ], + "Hash": "313d31eff2274ecf4c1d3581db7241f9" + }, "glue": { "Package": "glue", "Version": "1.7.0", @@ -258,6 +675,161 @@ ], "Hash": "e0b3a53876554bd45879e596cdb10a52" }, + "googledrive": { + "Package": "googledrive", + "Version": "2.1.1", + "Source": "Repository", + "Repository": "CRAN", + "Requirements": [ + "R", + "cli", + "gargle", + "glue", + "httr", + "jsonlite", + "lifecycle", + "magrittr", + "pillar", + "purrr", + "rlang", + "tibble", + "utils", + "uuid", + "vctrs", + "withr" + ], + "Hash": "e99641edef03e2a5e87f0a0b1fcc97f4" + }, + "googlesheets4": { + "Package": "googlesheets4", + "Version": "1.1.1", + "Source": "Repository", + "Repository": "CRAN", + "Requirements": [ + "R", + "cellranger", + "cli", + "curl", + "gargle", + "glue", + "googledrive", + "httr", + "ids", + "lifecycle", + "magrittr", + "methods", + "purrr", + "rematch2", + "rlang", + "tibble", + "utils", + "vctrs", + "withr" + ], + "Hash": "d6db1667059d027da730decdc214b959" + }, + "gtable": { + "Package": "gtable", + "Version": "0.3.4", + "Source": "Repository", + "Repository": "CRAN", + "Requirements": [ + "R", + "cli", + "glue", + "grid", + "lifecycle", + "rlang" + ], + "Hash": "b29cf3031f49b04ab9c852c912547eef" + }, + "gtfsio": { + "Package": "gtfsio", + "Version": "1.1.1", + "Source": "Repository", + "Repository": "CRAN", + "Requirements": [ + "data.table", + "utils", + "zip" + ], + "Hash": "535763c67daa71cec7c9b7e3f6c5d2c8" + }, + "hablar": { + "Package": "hablar", + "Version": "0.3.2", + "Source": "Repository", + "Repository": "CRAN", + "Requirements": [ + "dplyr", + "lubridate", + "purrr" + ], + "Hash": "c7a6a49207405553fd26eff865d94360" + }, + "haven": { + "Package": "haven", + "Version": "2.5.4", + "Source": "Repository", + "Repository": "CRAN", + "Requirements": [ + "R", + "cli", + "cpp11", + "forcats", + "hms", + "lifecycle", + "methods", + "readr", + "rlang", + "tibble", + "tidyselect", + "vctrs" + ], + "Hash": "9171f898db9d9c4c1b2c745adc2c1ef1" + }, + "highr": { + "Package": "highr", + "Version": "0.10", + "Source": "Repository", + "Repository": "CRAN", + "Requirements": [ + "R", + "xfun" + ], + "Hash": "06230136b2d2b9ba5805e1963fa6e890" + }, + "hms": { + "Package": "hms", + "Version": "1.1.3", + "Source": "Repository", + "Repository": "CRAN", + "Requirements": [ + "lifecycle", + "methods", + "pkgconfig", + "rlang", + "vctrs" + ], + "Hash": "b59377caa7ed00fa41808342002138f9" + }, + "htmltools": { + "Package": "htmltools", + "Version": "0.5.7", + "Source": "Repository", + "Repository": "CRAN", + "Requirements": [ + "R", + "base64enc", + "digest", + "ellipsis", + "fastmap", + "grDevices", + "rlang", + "utils" + ], + "Hash": 
"2d7b3857980e0e0d0a1fd6f11928ab0f" + }, "httr": { "Package": "httr", "Version": "1.4.7", @@ -273,6 +845,50 @@ ], "Hash": "ac107251d9d9fd72f0ca8049988f1d7f" }, + "ids": { + "Package": "ids", + "Version": "1.0.1", + "Source": "Repository", + "Repository": "CRAN", + "Requirements": [ + "openssl", + "uuid" + ], + "Hash": "99df65cfef20e525ed38c3d2577f7190" + }, + "igraph": { + "Package": "igraph", + "Version": "1.6.0", + "Source": "Repository", + "Repository": "CRAN", + "Requirements": [ + "Matrix", + "R", + "cli", + "cpp11", + "grDevices", + "graphics", + "lifecycle", + "magrittr", + "methods", + "pkgconfig", + "rlang", + "stats", + "utils" + ], + "Hash": "eef74fe28b747e52288ea9e1d3600034" + }, + "isoband": { + "Package": "isoband", + "Version": "0.2.7", + "Source": "Repository", + "Repository": "CRAN", + "Requirements": [ + "grid", + "utils" + ], + "Hash": "0080607b4a1a7b28979aecef976d8bc2" + }, "iterators": { "Package": "iterators", "Version": "1.0.14", @@ -284,6 +900,16 @@ ], "Hash": "8954069286b4b2b0d023d1b288dce978" }, + "jquerylib": { + "Package": "jquerylib", + "Version": "0.1.4", + "Source": "Repository", + "Repository": "CRAN", + "Requirements": [ + "htmltools" + ], + "Hash": "5aab57a3bd297eee1c1d862735972182" + }, "jsonlite": { "Package": "jsonlite", "Version": "1.8.8", @@ -294,6 +920,33 @@ ], "Hash": "e1b9c55281c5adc4dd113652d9e26768" }, + "knitr": { + "Package": "knitr", + "Version": "1.45", + "Source": "Repository", + "Repository": "CRAN", + "Requirements": [ + "R", + "evaluate", + "highr", + "methods", + "tools", + "xfun", + "yaml" + ], + "Hash": "1ec462871063897135c1bcbe0fc8f07d" + }, + "labeling": { + "Package": "labeling", + "Version": "0.4.3", + "Source": "Repository", + "Repository": "CRAN", + "Requirements": [ + "graphics", + "stats" + ], + "Hash": "b64ec208ac5bc1852b285f665d6368b3" + }, "lattice": { "Package": "lattice", "Version": "0.22-5", @@ -322,6 +975,19 @@ ], "Hash": "b8552d117e1b808b09a832f589b79035" }, + "lubridate": { + "Package": "lubridate", + "Version": "1.9.3", + "Source": "Repository", + "Repository": "CRAN", + "Requirements": [ + "R", + "generics", + "methods", + "timechange" + ], + "Hash": "680ad542fbcf801442c83a6ac5a2126c" + }, "lwgeom": { "Package": "lwgeom", "Version": "0.2-13", @@ -343,17 +1009,118 @@ "Requirements": [ "R" ], - "Hash": "7ce2733a9826b3aeb1775d56fd305472" + "Hash": "7ce2733a9826b3aeb1775d56fd305472" + }, + "memoise": { + "Package": "memoise", + "Version": "2.0.1", + "Source": "Repository", + "Repository": "CRAN", + "Requirements": [ + "cachem", + "rlang" + ], + "Hash": "e2817ccf4a065c5d9d7f2cfbe7c1d78c" + }, + "mgcv": { + "Package": "mgcv", + "Version": "1.9-1", + "Source": "Repository", + "Repository": "CRAN", + "Requirements": [ + "Matrix", + "R", + "graphics", + "methods", + "nlme", + "splines", + "stats", + "utils" + ], + "Hash": "110ee9d83b496279960e162ac97764ce" + }, + "mime": { + "Package": "mime", + "Version": "0.12", + "Source": "Repository", + "Repository": "CRAN", + "Requirements": [ + "tools" + ], + "Hash": "18e9c28c1d3ca1560ce30658b22ce104" + }, + "modelr": { + "Package": "modelr", + "Version": "0.1.11", + "Source": "Repository", + "Repository": "CRAN", + "Requirements": [ + "R", + "broom", + "magrittr", + "purrr", + "rlang", + "tibble", + "tidyr", + "tidyselect", + "vctrs" + ], + "Hash": "4f50122dc256b1b6996a4703fecea821" + }, + "munsell": { + "Package": "munsell", + "Version": "0.5.0", + "Source": "Repository", + "Repository": "CRAN", + "Requirements": [ + "colorspace", + "methods" + ], + "Hash": 
"6dfe8bf774944bd5595785e3229d8771" + }, + "nabor": { + "Package": "nabor", + "Version": "0.5.0", + "Source": "Repository", + "Repository": "CRAN", + "Requirements": [ + "BH", + "R", + "Rcpp", + "RcppEigen", + "methods" + ], + "Hash": "54388cc0cf3dfc0948a4bfbf9e8cceba" + }, + "nlme": { + "Package": "nlme", + "Version": "3.1-164", + "Source": "Repository", + "Repository": "CRAN", + "Requirements": [ + "R", + "graphics", + "lattice", + "stats", + "utils" + ], + "Hash": "a623a2239e642806158bc4dc3f51565d" }, - "mime": { - "Package": "mime", - "Version": "0.12", + "nngeo": { + "Package": "nngeo", + "Version": "0.4.7", "Source": "Repository", - "Repository": "CRAN", + "Repository": "RSPM", "Requirements": [ - "tools" + "R", + "data.table", + "methods", + "nabor", + "parallel", + "sf", + "units" ], - "Hash": "18e9c28c1d3ca1560ce30658b22ce104" + "Hash": "e07deca5934b4e567b2089d5ebc85ef7" }, "openssl": { "Package": "openssl", @@ -407,6 +1174,43 @@ ], "Hash": "01f28d4278f15c76cddbea05899c5d6f" }, + "prettyunits": { + "Package": "prettyunits", + "Version": "1.2.0", + "Source": "Repository", + "Repository": "CRAN", + "Requirements": [ + "R" + ], + "Hash": "6b01fc98b1e86c4f705ce9dcfd2f57c7" + }, + "processx": { + "Package": "processx", + "Version": "3.8.3", + "Source": "Repository", + "Repository": "CRAN", + "Requirements": [ + "R", + "R6", + "ps", + "utils" + ], + "Hash": "82d48b1aec56084d9438dbf98087a7e9" + }, + "progress": { + "Package": "progress", + "Version": "1.2.3", + "Source": "Repository", + "Repository": "CRAN", + "Requirements": [ + "R", + "R6", + "crayon", + "hms", + "prettyunits" + ], + "Hash": "f4625e061cb2865f111b47ff163a5ca6" + }, "proxy": { "Package": "proxy", "Version": "0.4-27", @@ -419,6 +1223,17 @@ ], "Hash": "e0ef355c12942cf7a6b91a6cfaea8b3e" }, + "ps": { + "Package": "ps", + "Version": "1.7.6", + "Source": "Repository", + "Repository": "CRAN", + "Requirements": [ + "R", + "utils" + ], + "Hash": "dd2b9319ee0656c8acf45c7f40c59de7" + }, "purrr": { "Package": "purrr", "Version": "1.0.2", @@ -434,6 +1249,27 @@ ], "Hash": "1cba04a4e9414bdefc9dcaa99649a8dc" }, + "ragg": { + "Package": "ragg", + "Version": "1.2.7", + "Source": "Repository", + "Repository": "CRAN", + "Requirements": [ + "systemfonts", + "textshaping" + ], + "Hash": "90a1b8b7e518d7f90480d56453b4d062" + }, + "rappdirs": { + "Package": "rappdirs", + "Version": "0.3.3", + "Source": "Repository", + "Repository": "CRAN", + "Requirements": [ + "R" + ], + "Hash": "5e3c5dc0b071b21fa128676560dbe94d" + }, "raster": { "Package": "raster", "Version": "3.6-26", @@ -448,6 +1284,61 @@ ], "Hash": "7d6eda494f34a644420ac1bfd2a8023a" }, + "readr": { + "Package": "readr", + "Version": "2.1.5", + "Source": "Repository", + "Repository": "CRAN", + "Requirements": [ + "R", + "R6", + "cli", + "clipr", + "cpp11", + "crayon", + "hms", + "lifecycle", + "methods", + "rlang", + "tibble", + "tzdb", + "utils", + "vroom" + ], + "Hash": "9de96463d2117f6ac49980577939dfb3" + }, + "readxl": { + "Package": "readxl", + "Version": "1.4.3", + "Source": "Repository", + "Repository": "CRAN", + "Requirements": [ + "R", + "cellranger", + "cpp11", + "progress", + "tibble", + "utils" + ], + "Hash": "8cf9c239b96df1bbb133b74aef77ad0a" + }, + "rematch": { + "Package": "rematch", + "Version": "2.0.0", + "Source": "Repository", + "Repository": "CRAN", + "Hash": "cbff1b666c6fa6d21202f07e2318d4f1" + }, + "rematch2": { + "Package": "rematch2", + "Version": "2.1.2", + "Source": "Repository", + "Repository": "CRAN", + "Requirements": [ + "tibble" + ], + "Hash": 
"76c9e04c712a05848ae7a23d2f170a40" + }, "renv": { "Package": "renv", "Version": "1.0.3", @@ -458,6 +1349,28 @@ ], "Hash": "41b847654f567341725473431dd0d5ab" }, + "reprex": { + "Package": "reprex", + "Version": "2.1.0", + "Source": "Repository", + "Repository": "CRAN", + "Requirements": [ + "R", + "callr", + "cli", + "clipr", + "fs", + "glue", + "knitr", + "lifecycle", + "rlang", + "rmarkdown", + "rstudioapi", + "utils", + "withr" + ], + "Hash": "1425f91b4d5d9a8f25352c44a3d914ed" + }, "rlang": { "Package": "rlang", "Version": "1.1.3", @@ -469,6 +1382,57 @@ ], "Hash": "42548638fae05fd9a9b5f3f437fbbbe2" }, + "rmarkdown": { + "Package": "rmarkdown", + "Version": "2.25", + "Source": "Repository", + "Repository": "CRAN", + "Requirements": [ + "R", + "bslib", + "evaluate", + "fontawesome", + "htmltools", + "jquerylib", + "jsonlite", + "knitr", + "methods", + "stringr", + "tinytex", + "tools", + "utils", + "xfun", + "yaml" + ], + "Hash": "d65e35823c817f09f4de424fcdfa812a" + }, + "rstudioapi": { + "Package": "rstudioapi", + "Version": "0.15.0", + "Source": "Repository", + "Repository": "CRAN", + "Hash": "5564500e25cffad9e22244ced1379887" + }, + "rvest": { + "Package": "rvest", + "Version": "1.0.3", + "Source": "Repository", + "Repository": "CRAN", + "Requirements": [ + "R", + "cli", + "glue", + "httr", + "lifecycle", + "magrittr", + "rlang", + "selectr", + "tibble", + "withr", + "xml2" + ], + "Hash": "a4a5ac819a467808c60e36e92ddf195e" + }, "s2": { "Package": "s2", "Version": "1.1.6", @@ -481,6 +1445,53 @@ ], "Hash": "32f7b1a15bb01ae809022960abad5363" }, + "sass": { + "Package": "sass", + "Version": "0.4.8", + "Source": "Repository", + "Repository": "CRAN", + "Requirements": [ + "R6", + "fs", + "htmltools", + "rappdirs", + "rlang" + ], + "Hash": "168f9353c76d4c4b0a0bbf72e2c2d035" + }, + "scales": { + "Package": "scales", + "Version": "1.3.0", + "Source": "Repository", + "Repository": "CRAN", + "Requirements": [ + "R", + "R6", + "RColorBrewer", + "cli", + "farver", + "glue", + "labeling", + "lifecycle", + "munsell", + "rlang", + "viridisLite" + ], + "Hash": "c19df082ba346b0ffa6f833e92de34d1" + }, + "selectr": { + "Package": "selectr", + "Version": "0.4-2", + "Source": "Repository", + "Repository": "CRAN", + "Requirements": [ + "R", + "R6", + "methods", + "stringr" + ], + "Hash": "3838071b66e0c566d55cc26bd6e27bf4" + }, "sf": { "Package": "sf", "Version": "1.0-15", @@ -569,6 +1580,17 @@ "Repository": "CRAN", "Hash": "3a1be13d68d47a8cd0bfd74739ca1555" }, + "systemfonts": { + "Package": "systemfonts", + "Version": "1.0.5", + "Source": "Repository", + "Repository": "CRAN", + "Requirements": [ + "R", + "cpp11" + ], + "Hash": "15b594369e70b975ba9f064295983499" + }, "terra": { "Package": "terra", "Version": "1.7-65", @@ -581,6 +1603,18 @@ ], "Hash": "8e245fd4eab07bf55ddb2e6ea353c0e1" }, + "textshaping": { + "Package": "textshaping", + "Version": "0.3.7", + "Source": "Repository", + "Repository": "CRAN", + "Requirements": [ + "R", + "cpp11", + "systemfonts" + ], + "Hash": "997aac9ad649e0ef3b97f96cddd5622b" + }, "tibble": { "Package": "tibble", "Version": "3.2.1", @@ -639,6 +1673,96 @@ ], "Hash": "79540e5fcd9e0435af547d885f184fd5" }, + "tidytransit": { + "Package": "tidytransit", + "Version": "1.6.1", + "Source": "Repository", + "Repository": "CRAN", + "Requirements": [ + "R", + "data.table", + "digest", + "dplyr", + "geodist", + "gtfsio", + "hms", + "rlang", + "sf" + ], + "Hash": "5eba02e60ef95c0f26731b0938bf24bc" + }, + "tidyverse": { + "Package": "tidyverse", + "Version": "2.0.0", + "Source": "Repository", 
+ "Repository": "CRAN", + "Requirements": [ + "R", + "broom", + "cli", + "conflicted", + "dbplyr", + "dplyr", + "dtplyr", + "forcats", + "ggplot2", + "googledrive", + "googlesheets4", + "haven", + "hms", + "httr", + "jsonlite", + "lubridate", + "magrittr", + "modelr", + "pillar", + "purrr", + "ragg", + "readr", + "readxl", + "reprex", + "rlang", + "rstudioapi", + "rvest", + "stringr", + "tibble", + "tidyr", + "xml2" + ], + "Hash": "c328568cd14ea89a83bd4ca7f54ae07e" + }, + "timechange": { + "Package": "timechange", + "Version": "0.3.0", + "Source": "Repository", + "Repository": "CRAN", + "Requirements": [ + "R", + "cpp11" + ], + "Hash": "c5f3c201b931cd6474d17d8700ccb1c8" + }, + "tinytex": { + "Package": "tinytex", + "Version": "0.49", + "Source": "Repository", + "Repository": "CRAN", + "Requirements": [ + "xfun" + ], + "Hash": "5ac22900ae0f386e54f1c307eca7d843" + }, + "tzdb": { + "Package": "tzdb", + "Version": "0.4.0", + "Source": "Repository", + "Repository": "CRAN", + "Requirements": [ + "R", + "cpp11" + ], + "Hash": "f561504ec2897f4d46f0c7657e488ae1" + }, "units": { "Package": "units", "Version": "0.8-5", @@ -660,6 +1784,16 @@ ], "Hash": "62b65c52671e6665f803ff02954446e9" }, + "uuid": { + "Package": "uuid", + "Version": "1.2-0", + "Source": "Repository", + "Repository": "CRAN", + "Requirements": [ + "R" + ], + "Hash": "303c19bfd970bece872f93a824e323d9" + }, "vctrs": { "Package": "vctrs", "Version": "0.6.5", @@ -674,6 +1808,42 @@ ], "Hash": "c03fa420630029418f7e6da3667aac4a" }, + "viridisLite": { + "Package": "viridisLite", + "Version": "0.4.2", + "Source": "Repository", + "Repository": "CRAN", + "Requirements": [ + "R" + ], + "Hash": "c826c7c4241b6fc89ff55aaea3fa7491" + }, + "vroom": { + "Package": "vroom", + "Version": "1.6.5", + "Source": "Repository", + "Repository": "CRAN", + "Requirements": [ + "R", + "bit64", + "cli", + "cpp11", + "crayon", + "glue", + "hms", + "lifecycle", + "methods", + "progress", + "rlang", + "stats", + "tibble", + "tidyselect", + "tzdb", + "vctrs", + "withr" + ], + "Hash": "390f9315bc0025be03012054103d227c" + }, "withr": { "Package": "withr", "Version": "3.0.0", @@ -695,6 +1865,44 @@ "R" ], "Hash": "5d4545e140e36476f35f20d0ca87963e" + }, + "xfun": { + "Package": "xfun", + "Version": "0.41", + "Source": "Repository", + "Repository": "CRAN", + "Requirements": [ + "stats", + "tools" + ], + "Hash": "460a5e0fe46a80ef87424ad216028014" + }, + "xml2": { + "Package": "xml2", + "Version": "1.3.6", + "Source": "Repository", + "Repository": "CRAN", + "Requirements": [ + "R", + "cli", + "methods", + "rlang" + ], + "Hash": "1d0336142f4cd25d8d23cd3ba7a8fb61" + }, + "yaml": { + "Package": "yaml", + "Version": "2.3.8", + "Source": "Repository", + "Repository": "CRAN", + "Hash": "29240487a071f535f5e5d5a323b7afbd" + }, + "zip": { + "Package": "zip", + "Version": "2.3.0", + "Source": "Repository", + "Repository": "CRAN", + "Hash": "d98c94dacb7e0efcf83b0a133a705504" } } } From d99ee1a61e417491a30ff09bb13869f08ac72bc2 Mon Sep 17 00:00:00 2001 From: jafshin Date: Mon, 29 Jan 2024 16:33:52 +1100 Subject: [PATCH 056/103] fixing the dir url for data and setting process osm to default --- NetworkGenerator.R | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/NetworkGenerator.R b/NetworkGenerator.R index b886c1f..f841aa9 100644 --- a/NetworkGenerator.R +++ b/NetworkGenerator.R @@ -43,7 +43,7 @@ makeNetwork<-function(city, outputFileName = "test"){ region = "./data/processed/greater_melbourne.sqlite" outputCrs = 7899 osmGpkg = "./data/processed/melbourne_osm.gpkg" - 
unconfiguredSqlite = "../data/processed/melbourne_network_unconfigured.sqlite"
+  unconfiguredSqlite = "./data/processed/melbourne_network_unconfigured.sqlite"
   cropAreaPoly = "city-of-melbourne_victoria"
   # demFile = "./data/DEM_melbourne.tif"
   # osmPbfExtract = "./data/melbourne_australia.osm.pbf"
@@ -65,7 +65,7 @@ makeNetwork<-function(city, outputFileName = "test"){
   # NETWORK FROM OSM
   # A flag for whether to build unconfigured network from osm extract (if not,
   # must already have unconfigured sqlite)
-  networkFromOsm=F
+  networkFromOsm=T
   saveUnconfigured=T

   # SIMPLIFICATION

From 339797d4bded487b4e3570e2d8585b38fa93ef87 Mon Sep 17 00:00:00 2001
From: StevePem
Date: Tue, 30 Jan 2024 09:11:04 +1100
Subject: [PATCH 057/103] update readme information

---
 NetworkGenerator.R | 22 +++++++++-----------
 README.md          | 50 ++++++++++++++++++++++------------------------
 data/README.md     |  4 ++++
 3 files changed, 37 insertions(+), 39 deletions(-)

diff --git a/NetworkGenerator.R b/NetworkGenerator.R
index f841aa9..9198338 100644
--- a/NetworkGenerator.R
+++ b/NetworkGenerator.R
@@ -15,40 +15,36 @@ makeNetwork<-function(city, outputFileName = "test"){
   # • osmGpkg: location where downloaded OSM extract for region is to be stored
   #   (if 'downloadOsm=T') and/or read from (if 'processOsm=T')
   # • unconfiguredSqlite: location where processed OSM file is to be stored
-  #   (if 'processOsm=T') or read from (if 'processOsm=F')
+  #   (if 'networkFromOsm=T') or read from (if 'networkFromOsm=F')
   # • cropAreaPoly: if 'crop2TestArea=T' cropArea location from
   #   https://github.com/JamesChevalier/cities/tree/master/australia/victoria
   #   (only supported for Victoria at this stage)
   # • demFile: if 'addElevation=T', digital elevation model raster file (must be
   #   in same coordinate system as network)
-  # • osmPbfExtract: if 'addDestinationLayer=T', OSM extract for destinations,
-  #   in .osm.pbf format
   # • ndviFile: if 'addNDVI=T', raster file with NDVI values (must be in same
   #   coordinate system as network)
   # • gtfs_feed: if 'addGtfs=T' or 'addDestinationLayer=T', zip file containing
   #   GTFS data (and, if 'addGtfs=T', also set start and end dates in GTFS section)

   if (city == "Bendigo") {
-    region = "./data/processed/greater_bendigo.sqlite"
+    region = "./data/greater_bendigo.sqlite"
     outputCrs = 7899
-    osmGpkg = "./data/processed/bendigo_osm.gpkg"
-    unconfiguredSqlite = "./data/processed/bendigo_network_unconfigured.sqlite"
+    osmGpkg = "./data/bendigo_osm.gpkg"
+    unconfiguredSqlite = "./data/bendigo_network_unconfigured.sqlite"
     cropAreaPoly = "" # must set 'crop2Area=F'
     # demFile = "./data/5m_DEM_reprojected.tif" # MIGHT NOT BE FINAL FILE
-    # osmPbfExtract = "./data/brisbane_australia.osm.pbf"
     # ndviFile = "" # must set 'addNDVI=F'
-    gtfs_feed = "./data/processed/gtfs.zip"
+    gtfs_feed = "./data/gtfs.zip"

   } else if (city == "Melbourne") {
-    region = "./data/processed/greater_melbourne.sqlite"
+    region = "./data/greater_melbourne.sqlite"
     outputCrs = 7899
-    osmGpkg = "./data/processed/melbourne_osm.gpkg"
-    unconfiguredSqlite = "./data/processed/melbourne_network_unconfigured.sqlite"
+    osmGpkg = "./data/melbourne_osm.gpkg"
+    unconfiguredSqlite = "./data/melbourne_network_unconfigured.sqlite"
     cropAreaPoly = "city-of-melbourne_victoria"
     # demFile = "./data/DEM_melbourne.tif"
-    # osmPbfExtract = "./data/melbourne_australia.osm.pbf"
     # ndviFile = "./data/NDVI_1600mBuffer_Melbourne_reprojected.tif"
-    gtfs_feed = "./data/processed/gtfs.zip"
+    gtfs_feed = "./data/gtfs.zip"

   } else {
     echo(paste("City parameters for", city, "have not been set; unable to
proceed\n")) diff --git a/README.md b/README.md index 2ebb4d2..b5592e1 100644 --- a/README.md +++ b/README.md @@ -1,22 +1,18 @@ # MATSim network for Melbourne `master`![passing?](https://github.com/matsim-melbourne/network/workflows/build/badge.svg?branch=master) `dev`![passing?](https://github.com/matsim-melbourne/network/workflows/build/badge.svg?branch=dev) -This page explains the steps for building a road network model for active transport simulation models such as MATSim, including active transportation related infrastructure and attribute. +This page explains the steps for building a road network model for active transport simulation models such as MATSim, including active transportation related infrastructure and attributes. ## Publications - Jafari, A., Both, A., Singh, D., Gunn, L., & Giles-Corti, B. (2022). [Building the road network for city-scale active transport simulation models](https://doi.org/10.1016/j.simpat.2021.102398). *Simulation Modelling Practice and Theory*, 114, 102398 ( [Pre-print version](https://arxiv.org/abs/2104.03063) ) ## Prerequisites -* Postgres and postgis * GDAL * R 4.2+ ## Building the network -Network generation code is written primarily in R programming language, with some accompanying SQL and Bash scripts, therefore a working knowledge of R is expected. - -To get started, you must first prepare the required input files for generating the network. -There are multiple entry points to the algorithm. If you want to start from raw OSM extract, download the extract for your region. Please note that it should be in .osm format, .osm.pbf is not yet supported. You can use [osmconvert](https://wiki.openstreetmap.org/wiki/Osmconvert) easily convert .osm.pbf to .osm. +Network generation code is written primarily in R programming language,therefore a working knowledge of R is expected. All required R packages must be installed before running the algorithm. `renv` will take of that for you and you just need to run the following in R to install the packages: ``` @@ -27,9 +23,29 @@ renv::restore() Before running the algorithm, adjust the parameters and input/output file names for your scenario in `NetworkGenerator.R`. Adjustable parameters are listed under the Parameters sub-heading. -To run the network generation algorithm from the terminal, you need to run something like below, specifying your desired output folder name as the argument for `makeNetwork()`: +Running the algorithm requires an input parameter 'city', and adjustable parameters must be completed for that city, specifying locations of relevant input files and the applicable CRS. If running for a location for which 'city' parameters have not already been defined, then these must be added, using existing city parameters as a template. + +The city parameters are as follows. +* region - required if OSM extract is to be downloaded, or destinations (see below) are to be extracted. This must be the location of a file in sqlite format which defines the boundary of the area for which the OSM extract is required. +* outputCrs - specify the appropriate EPSG coordinate reference system number for the region. +* osmGpkg - the location to which an OSM extract in .gpkg format will be saved, or where an existing .gpkg file is stored if already held. +* unconfiguredSqlite - the location to which an unconfigured network in .sqlite format will be saved, or where an existing unconfigured network is stored if already held. +* cropAreaPoly - an optional parameter for cropping the OSM extract to a smaller test area. 
+* demFile - required if 'addElevation' is set to 'T'. This must be the location of a digital elevation model raster file in the same CRS as the network.
+* ndviFile - required if 'addNDVI' is set to 'T'. This must be the location of a raster file with NDVI values in the same CRS as the network.
+* gtfs_feed - required if 'addGtfs' or 'addDestinationLayer' is set to 'T'. This must be the location of a zip file containing GTFS data.
+
+The parameters assume that the relevant files are stored in a 'data' subdirectory.
+
+The algorithm will do the following:
+* if 'downloadOsm' is set to 'T', download an OSM extract for the selected 'region' and save it as a .gpkg file.
+* if 'networkFromOsm' is set to 'T', process the downloaded OSM extract to an unconfigured network in the form of an .sqlite file with layers of nodes, edges and osm tags ('osm_metadata').
+* simplify the network, producing an output network in .sqlite format (with options to select .shp and .xml formats as well).
+* if 'addDestination' is set to 'T', include a layer of destination points of interest for use in accessibility analysis, such as supermarkets, doctors and schools, drawn from OSM and GTFS layers.
+
+To run the network generation algorithm from the terminal, you need to run something like below, specifying your city and your desired output folder name as the arguments for `makeNetwork()`:
 ```
-Rscript -e 'source("NetworkGenerator.R"); makeNetwork("example")'
+Rscript -e 'source("NetworkGenerator.R"); makeNetwork("Melbourne", "example")'
 ```
 
@@ -37,24 +53,6 @@ Rscript -e 'source("NetworkGenerator.R"); makeNetwork("example")'
 ### Installing sf
 SF package in R requires a few dependencies, see https://r-spatial.github.io/sf/ for more details.
 
-### Password authentication error when using postgres
-One solution is to temporarly change the METHOD in `pg_hba.conf` from md5 to trust. See [this post](https://hassanannajjar.medium.com/how-to-fix-error-password-authentication-failed-for-the-user-in-postgresql-896e1fd880dc) for more details.
-
-
-### Postgres install on macOS 10.15+
-```
-brew install postgres
-brew install postgis
-brew services start postresql
-/usr/local/opt/postgres/bin/createuser -s postgres
-```
-If you have osgeo-postgis you have may to unlink that and link the newly installed postgis as follows.
-```
-brew unlink osgeo-postgis
-brew link postgis
-```
-Then restart your machine.
-
 ### iGraph install on macOS
 If running R from a homebrew install, be sure to unlink suite-sparse before installing iGraph
 ```
diff --git a/data/README.md b/data/README.md
index cd46b6f..3436baf 100644
--- a/data/README.md
+++ b/data/README.md
@@ -1,3 +1,7 @@
+# Update required
+
+The information below requires updating once the final data location and structure are determined.
+
 # Network generation data
 This directory contains input files required to generate a MATSim network for Melbourne, which can be found [here](https://cloudstor.aarnet.edu.au/plus/s/ssLkX8Uez64rV3D). Alternatively, you can use the `./prepare.sh` command as described below to download the data you need.
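
As a worked illustration of the README guidance in the patch above, the following is a hedged sketch of what a new city block in `NetworkGenerator.R` might look like. It is not part of the patch: the city name, every file path and the EPSG code (7856, GDA2020 / MGA zone 56) are placeholders to be replaced with real inputs for your region.

```
# hedged sketch only - modelled on the existing Bendigo and Melbourne entries;
# all paths below are hypothetical and must point at real files
city <- "Brisbane"

if (city == "Brisbane") {
  region = "./data/greater_brisbane.sqlite"        # region boundary, sqlite format
  outputCrs = 7856                                 # EPSG code appropriate to the region
  osmGpkg = "./data/brisbane_osm.gpkg"             # written if downloadOsm=T, read otherwise
  unconfiguredSqlite = "./data/brisbane_network_unconfigured.sqlite"
  cropAreaPoly = ""                                # must set 'crop2Area=F'
  # demFile = "./data/brisbane_dem.tif"            # only needed if addElevation=T
  # ndviFile = ""                                  # must set 'addNDVI=F'
  gtfs_feed = "./data/gtfs.zip"                    # needed if addGtfs=T or addDestinationLayer=T
}
```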
From 929707bfd0301bb52eebbfe271bd36e7fb18d48d Mon Sep 17 00:00:00 2001 From: StevePem Date: Tue, 30 Jan 2024 14:52:36 +1100 Subject: [PATCH 058/103] deleting bash and sql scripts for network generation, replaced by processOsm.R --- network.sql | 247 -------------------------------------------------- processOSM.sh | 90 ------------------ 2 files changed, 337 deletions(-) delete mode 100755 network.sql delete mode 100755 processOSM.sh diff --git a/network.sql b/network.sql deleted file mode 100755 index 715a5c1..0000000 --- a/network.sql +++ /dev/null @@ -1,247 +0,0 @@ --- turning timing on -\timing --- transforms the geometries to a projected (i.e, x,y) system and snaps to the --- nearest metre. Was using GDA2020 (7845), now using MGA Zone 55 (28355) -ALTER TABLE roads - ALTER COLUMN geom TYPE geometry(LineString,:v1) - USING ST_SnapToGrid(ST_Transform(geom,:v1),1); -ALTER TABLE roads_points - ALTER COLUMN geom TYPE geometry(Point,:v1) - USING ST_SnapToGrid(ST_Transform(geom,:v1),1); -CREATE INDEX roads_points_gix ON roads USING GIST (geom); - --- determine if the road segment is a bridge or tunnel -ALTER TABLE roads ADD COLUMN bridge_or_tunnel BOOLEAN; -UPDATE roads - SET bridge_or_tunnel = - CASE WHEN other_tags LIKE '%bridge%' OR other_tags LIKE '%tunnel%' THEN TRUE - ELSE FALSE END; -CREATE INDEX roads_gix ON roads USING GIST (geom); - --- find the bridge-bridge or road-road intersections -DROP TABLE IF EXISTS line_intersections; -CREATE TABLE line_intersections AS -SELECT a.osm_id AS osm_id_a, b.osm_id AS osm_id_b, - ST_Intersection(a.geom,b.geom) AS geom -FROM roads AS a, roads AS b -WHERE a.osm_id < b.osm_id AND - a.bridge_or_tunnel = b.bridge_or_tunnel AND - ST_Intersects(a.geom, b.geom) = TRUE; - --- group the intersections by osm_id -DROP TABLE IF EXISTS line_intersections_grouped; -CREATE TABLE line_intersections_grouped AS -SELECT c.osm_id, st_unaryunion(st_collect(c.geom)) AS geom -FROM - (SELECT a.osm_id_a AS osm_id, a.geom - FROM line_intersections as a - UNION - SELECT b.osm_id_b AS osm_id, b.geom - FROM line_intersections AS b) AS c -GROUP BY osm_id; - --- take the intersections, buffer them 0.01m, and use them to cut the lines they --- intersect. We then snap to the nearest metre, ensuring there are no gaps. --- Only intersections with the same osm_id need to be considered -DROP TABLE IF EXISTS line_cut; -CREATE TABLE line_cut AS -SELECT a.osm_id, -(ST_Dump(ST_SnapToGrid(ST_Difference(a.geom,ST_Buffer(b.geom,0.01)),1))).geom AS geom -FROM roads AS a, line_intersections_grouped AS b -WHERE a.osm_id = b.osm_id; - --- all the osm_ids currently processed. Some segments don't have any --- intersections so they will need to be added. 
Adding an index here to speedup --- processing -DROP TABLE IF EXISTS unique_ids; -CREATE TABLE unique_ids AS -SELECT DISTINCT osm_id -FROM line_cut; -CREATE UNIQUE INDEX osm_id_idx ON unique_ids (osm_id); - --- adding the remaining road segments -INSERT INTO line_cut -SELECT a.osm_id, a.geom -FROM roads AS a, - (SELECT osm_id - FROM roads - EXCEPT - SELECT osm_id - FROM unique_ids) AS b -WHERE a.osm_id = b.osm_id; -CREATE INDEX line_cut_gix ON line_cut USING GIST (geom); -ALTER TABLE line_cut ADD COLUMN lid SERIAL PRIMARY KEY; - --- find all of the road segment endpoints, including the new ones we've added --- from the intersections -DROP TABLE IF EXISTS endpoints; -CREATE TABLE endpoints AS -SELECT ST_StartPoint(a.geom) as geom -FROM line_cut as a -UNION -SELECT ST_EndPoint(b.geom) as geom -FROM line_cut as b; -CREATE INDEX endpoints_gix ON endpoints USING GIST (geom); - --- cluster the endpoints to assign each unique endpoint location an id -DROP TABLE IF EXISTS endpoints_clustered; -CREATE TABLE endpoints_clustered AS -SELECT (ST_Dump(a.geom)).geom AS geom -FROM - (SELECT ST_Union(geom) AS geom - FROM endpoints) AS a; -ALTER TABLE endpoints_clustered ADD COLUMN id SERIAL PRIMARY KEY; -CREATE INDEX endpoints_clustered_gix ON endpoints_clustered USING GIST (geom); - --- most lines will only have 2 endpoints near them, but some will have extra -DROP TABLE IF EXISTS endpoints_near_lines; -CREATE TABLE endpoints_near_lines AS -SELECT c.lid, c.num_endpoints, c.geom -FROM - (SELECT a.lid, COUNT(b.id) AS num_endpoints, - st_unaryunion(st_collect(b.geom)) AS geom - FROM - line_cut AS a, - endpoints_clustered AS b - WHERE - st_intersects(st_buffer(a.geom,0.1),b.geom) - GROUP BY - a.lid - ) AS c -WHERE - c.num_endpoints > 2; - -DROP TABLE IF EXISTS line_cut2; -CREATE TABLE line_cut2 AS -SELECT a.lid, a.osm_id, -(ST_Dump(ST_SnapToGrid(ST_Difference(a.geom,ST_Buffer(b.geom,0.01)),1))).geom AS geom -FROM line_cut AS a, endpoints_near_lines AS b -WHERE a.lid = b.lid; - -DELETE FROM line_cut -WHERE lid IN - (SELECT lid FROM line_cut2); - - -DROP TABLE IF EXISTS line_cut3; -CREATE TABLE line_cut3 AS - SELECT osm_id, geom - FROM line_cut - WHERE lid NOT IN (SELECT lid FROM line_cut2) -UNION - SELECT osm_id, geom - FROM line_cut2; - -- add length -ALTER TABLE line_cut3 ADD COLUMN length INTEGER; -UPDATE line_cut3 SET length = ST_Length(geom); - --- add from and to id columns -ALTER TABLE line_cut3 ADD COLUMN from_id INTEGER; -ALTER TABLE line_cut3 ADD COLUMN to_id INTEGER; - --- assign the from and to ids to the road segments -UPDATE line_cut3 AS a - SET from_id = b.id -FROM - endpoints_clustered as b -WHERE - ST_Intersects(ST_StartPoint(a.geom),b.geom); - -UPDATE line_cut3 AS a - SET to_id = b.id -FROM - endpoints_clustered as b -WHERE - ST_Intersects(ST_EndPoint(a.geom),b.geom); - -DELETE FROM line_cut3 -WHERE ST_isEmpty(geom); - - --- This doesn't seem to be necessary, but will keep this in in case we do need --- it for other networks. It finds all the unique ids used by the line_cut3 --- table and builds an index on it. 
-DROP TABLE IF EXISTS unique_node_ids; -CREATE TABLE unique_node_ids AS -SELECT DISTINCT c.id -FROM - (SELECT DISTINCT a.from_id AS id - FROM line_cut3 AS a - UNION - SELECT DISTINCT b.to_id AS id - FROM line_cut3 AS b) as c; -CREATE UNIQUE INDEX unique_node_ids_idx ON unique_node_ids (id); - --- filters endpoints_clustered to only have the nodes used in line_cut3 -DROP TABLE IF EXISTS endpoints_filtered; -CREATE TABLE endpoints_filtered AS -SELECT a.id, a.geom -FROM endpoints_clustered AS a, - unique_node_ids AS b -WHERE - a.id = b.id; -CREATE INDEX endpoints_filtered_gix ON endpoints_filtered USING GIST (geom); - --- the non-spatial data for the osm_id entries present in the network -DROP TABLE IF EXISTS osm_metadata; -CREATE TABLE osm_metadata AS -SELECT osm_id, highway, other_tags -FROM roads -WHERE osm_id IN - (SELECT DISTINCT osm_id FROM line_cut3); - --- from and to ids of edges that are roundabouts -DROP TABLE IF EXISTS edges_roundabout; -CREATE TABLE edges_roundabout AS -SELECT a.from_id, - a.to_id, - CASE WHEN b.other_tags LIKE '%roundabout%' THEN TRUE - ELSE FALSE - END AS is_roundabout -FROM - line_cut3 AS a, - osm_metadata AS b -WHERE - a.osm_id = b.osm_id; - --- node ids that connect to roundabouts -DROP TABLE IF EXISTS nodes_roundabout; -CREATE TABLE nodes_roundabout AS -SELECT DISTINCT c.id -FROM - (SELECT DISTINCT a.from_id AS id - FROM edges_roundabout AS a - WHERE is_roundabout = TRUE - UNION - SELECT DISTINCT b.to_id AS id - FROM edges_roundabout AS b - WHERE is_roundabout = TRUE) AS c; - --- nodes attributed with if they are at roundabouts or traffic signals -DROP TABLE IF EXISTS nodes_attributed; -CREATE TABLE nodes_attributed AS -SELECT c.id, - CASE WHEN c.id IN (SELECT id FROM nodes_roundabout) THEN 1 - ELSE 0 - END AS is_roundabout, - CASE WHEN c.length <= 20 THEN 1 - ELSE 0 - END AS is_signal, - c.geom -FROM - (SELECT - a.id, - a.geom, - ST_Distance(a.geom,b.geom) AS length - FROM - endpoints_filtered AS a - CROSS JOIN LATERAL - (SELECT geom - FROM roads_points - ORDER BY - a.geom <-> geom - LIMIT 1) AS b - ) AS c; - -CREATE INDEX nodes_attributed_gix ON nodes_attributed USING GIST (geom); - diff --git a/processOSM.sh b/processOSM.sh deleted file mode 100755 index b5f0f4c..0000000 --- a/processOSM.sh +++ /dev/null @@ -1,90 +0,0 @@ -#!/bin/bash - -extract=$1 -crs=$2 -output=$3 - -#extract='./data/melbourne_australia.osm' -#crs=28355 -#output='./data/network3.sqlite' - -# change to the directory this script is located in -cd "$(dirname "$0")" -# extract the roads from the osm file, put in temp.sqlite -ogr2ogr -update -overwrite -nln roads -f "SQLite" -dsco SPATIALITE=YES \ - -dialect SQLite -sql \ - "SELECT CAST(osm_id AS DOUBLE PRECISION) AS osm_id, highway, other_tags, \ - GEOMETRY FROM lines \ - WHERE (highway IS NOT NULL AND \ - NOT highway = 'bridleway' AND \ - NOT highway = 'bus_stop' AND \ - NOT highway = 'co' AND \ - NOT highway = 'platform' AND \ - NOT highway = 'raceway' AND \ - NOT highway = 'services' AND \ - NOT highway = 'traffic_island' AND \ - highway NOT LIKE '%construction%' AND \ - highway NOT LIKE '%proposed%' AND \ - highway NOT LIKE '%disused%' AND \ - highway NOT LIKE '%abandoned%') AND \ - (other_tags IS NULL OR - (other_tags NOT LIKE '%busbar%' AND \ - other_tags NOT LIKE '%abandoned%' AND \ - other_tags NOT LIKE '%\"access\"=>\"private\"%')) " \ - ./data/temp.sqlite $extract -# highway NOT LIKE '%service%' AND \ -# Removed since some service roads are used as footpaths (e.g., Royal Exhibition -# building) - -# bridleway can be used for 
walking and cycling (provided you give way to horses
-# they are more common in the UK.
-
-# extract the traffic signals, put in temp.sqlite
-ogr2ogr -update -overwrite -nln roads_points -f "SQLite" -dsco SPATIALITE=YES \
-  -dialect SQLite -sql \
-  "SELECT CAST(osm_id AS DOUBLE PRECISION) AS osm_id, highway, other_tags, \
-  GEOMETRY FROM points \
-  WHERE highway LIKE '%traffic_signals%' " \
-  ./data/temp.sqlite $extract
-
-# extract the train and tram lines and add to temp.sqlite
-# apparently there are miniature railways
-ogr2ogr -update -overwrite -nln pt -f "SQLite" -dialect SQLite -sql \
-  "SELECT CAST(osm_id AS DOUBLE PRECISION) AS osm_id, highway, other_tags, \
-  GEOMETRY FROM lines \
-  WHERE other_tags LIKE '%railway%' AND \
-  other_tags NOT LIKE '%busbar%' AND \
-  other_tags NOT LIKE '%abandoned%' AND \
-  other_tags NOT LIKE '%miniature%' AND \
-  other_tags NOT LIKE '%proposed%' AND \
-  other_tags NOT LIKE '%disused%' AND \
-  other_tags NOT LIKE '%preserved%' AND \
-  other_tags NOT LIKE '%construction%' AND \
-  other_tags NOT LIKE '%\"service\"=>\"yard\"%'" \
-  ./data/temp.sqlite $extract
-
-# the postgres database name.
-DB_NAME="network_test"
-
-# Delete the database if it already exists
-COMMAND="psql -U postgres -c 'DROP DATABASE ${DB_NAME}' postgres"
-eval $COMMAND
-# Create the database and add the postgis extension
-createdb -U postgres ${DB_NAME}
-psql -c 'create extension postgis' ${DB_NAME} postgres
-
-ogr2ogr -overwrite -lco GEOMETRY_NAME=geom -lco SCHEMA=public -f "PostgreSQL" \
-  PG:"host=localhost port=5432 user=postgres dbname=${DB_NAME}" \
-  -a_srs "EPSG:4326" ./data/temp.sqlite roads
-ogr2ogr -overwrite -lco GEOMETRY_NAME=geom -lco SCHEMA=public -f "PostgreSQL" \
-  PG:"host=localhost port=5432 user=postgres dbname=${DB_NAME}" \
-  -a_srs "EPSG:4326" ./data/temp.sqlite roads_points
-
-# run the sql statements
-psql -U postgres -d ${DB_NAME} -a -f network.sql -v v1="$crs"
-
-# extract the nodes, edges, and osm metadata to the network file
-ogr2ogr -update -overwrite -f SQLite -dsco SPATIALITE=yes $output PG:"dbname=${DB_NAME} user=postgres" public.line_cut3 -nln edges
-ogr2ogr -update -overwrite -f SQLite -update $output PG:"dbname=${DB_NAME} user=postgres" public.nodes_attributed -nln nodes
-ogr2ogr -update -overwrite -f SQLite -update $output PG:"dbname=${DB_NAME} user=postgres" public.osm_metadata -nln osm_metadata
-

From 83c5859d78607dee78026941389e282b95208336 Mon Sep 17 00:00:00 2001
From: StevePem
Date: Tue, 30 Jan 2024 15:34:12 +1100
Subject: [PATCH 059/103] avoid trunk roads being converted to non-cyclable
 when bikepaths rejoined

---
 functions/restructureData.R | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/functions/restructureData.R b/functions/restructureData.R
index 098c85d..68a5bd2 100644
--- a/functions/restructureData.R
+++ b/functions/restructureData.R
@@ -25,7 +25,7 @@ restructureData <- function(networkList, highway_lookup,
   # merging changed bikepaths back with rest of the links
   links <- links %>%
     mutate(cycleway=ifelse(uid %in% bikepath_uids,0,cycleway)) %>% # removing bikepaths from those that had it merged
-    mutate(is_cycle=ifelse(highway_order%in%c(1,8,2,9),0,is_cycle)) %>% # removing bikepaths from those that had it merged
+    mutate(is_cycle = ifelse(uid %in% bikepath_uids & highway_order %in% c(1, 8), 0, is_cycle)) %>% # removing bikepaths from those that had it merged
     dplyr::select(-uid) %>%
     rbind(bikepaths)
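
To make the effect of this one-line change concrete, here is a small hedged sketch (not part of the patch; the toy links and uid values are invented). In this repository's defaults, highway_order 1 and 8 are motorway and motorway_link, and 2 and 9 are trunk and trunk_link:

```
library(dplyr)

# toy data: link 1 is a plain trunk road; link 2 is a motorway link whose
# parallel bikepath was merged into it, so its uid appears in bikepath_uids
links <- tibble(uid = c(1, 2), highway_order = c(2, 1), is_cycle = c(1, 1))
bikepath_uids <- c(2)

links %>%
  mutate(
    # old rule: any motorway or trunk link loses is_cycle, including the plain trunk road
    is_cycle_old = ifelse(highway_order %in% c(1, 8, 2, 9), 0, is_cycle),
    # new rule: only merged-bikepath links on motorways are reset, so the
    # trunk road stays cyclable
    is_cycle_new = ifelse(uid %in% bikepath_uids & highway_order %in% c(1, 8),
                          0, is_cycle)
  )
```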
From b6bfd0a5bb01318aa8bfc84b1135e6c5558ff8f8 Mon Sep 17 00:00:00 2001
From: StevePem
Date: Thu, 1 Feb 2024 18:39:40 +1100
Subject: [PATCH 060/103] change highway order for service so bikepaths can be
 separated from service roads

---
 functions/buildDefaultsDF.R |  6 +++---
 functions/densifyNetwork.R  | 10 +++++-----
 functions/restructureData.R |  4 ++--
 3 files changed, 10 insertions(+), 10 deletions(-)

diff --git a/functions/buildDefaultsDF.R b/functions/buildDefaultsDF.R
index 30253a7..118751d 100644
--- a/functions/buildDefaultsDF.R
+++ b/functions/buildDefaultsDF.R
@@ -19,9 +19,9 @@ buildDefaultsDF <- function(){
     "unclassified"    , 1 , (50/3.6) , 0 , 600  , 1 , 1 , 1 , 13 ,
     "living_street"   , 1 , (40/3.6) , 0 , 300  , 1 , 1 , 1 , 14 ,
 
-    "cycleway"        , 1 , (30/3.6) , 0 , 300  , 1 , 0 , 0 , 15 ,
-    "track"           , 1 , (30/3.6) , 0 , 300  , 1 , 1 , 0 , 16 ,
-    "service"         , 1 , (40/3.6) , 0 , 300  , 1 , 1 , 1 , 17 ,
+    "service"         , 1 , (40/3.6) , 0 , 300  , 1 , 1 , 1 , 15 ,
+    "cycleway"        , 1 , (30/3.6) , 0 , 300  , 1 , 0 , 0 , 16 ,
+    "track"           , 1 , (30/3.6) , 0 , 300  , 1 , 1 , 0 , 17 ,
 
     "pedestrian"      , 1 , (30/3.6) , 0 , 120  , 0 , 1 , 0 , 18 ,
     "footway"         , 1 , (10/3.6) , 0 , 120  , 0 , 1 , 0 , 19 ,
diff --git a/functions/densifyNetwork.R b/functions/densifyNetwork.R
index 8994606..c989bd3 100644
--- a/functions/densifyNetwork.R
+++ b/functions/densifyNetwork.R
@@ -13,9 +13,9 @@
 # "road"            , 7
 # "unclassified"    , 13
 # "living_street"   , 14
-# "cycleway"        , 15
-# "track"           , 16
-# "service"         , 17
+# "service"         , 15
+# "cycleway"        , 16
+# "track"           , 17
 # "pedestrian"      , 18
 # "footway"         , 19
 # "path"            , 20
@@ -32,9 +32,9 @@ densifyNetwork <- function(networkList, minimum_length=400, densifyBikeways=F){
     links_to_segmentize <- links_df %>%
       filter(length>minimum_length & is_cycle==1)
   }else{
-    # Densifying all except for "cycleway", "footway","motorway","motorway_link","path","pedestrian","pt","steps","track")
+    # Densifying all except for "cycleway", "footway","motorway","motorway_link","path","pedestrian","corridor","steps","track")
     links_to_segmentize <- links_df %>%
-      filter(length>minimum_length & !highway_order%in%c(1,8,15,16,18,19,20,21,22))
+      filter(length>minimum_length & !highway_order%in%c(1,8,16,17,18,19,20,21,22))
   }
 
   links_unsegmented <- links_df %>%
diff --git a/functions/restructureData.R b/functions/restructureData.R
index 68a5bd2..9ec0668 100644
--- a/functions/restructureData.R
+++ b/functions/restructureData.R
@@ -10,12 +10,12 @@ restructureData <- function(networkList, highway_lookup,
   # finding merged bikepath ids
   bikepath_uids <- links %>% st_drop_geometry() %>%
-    filter((cycleway=="4" | cycleway=="5") & highway_order<15) %>%
+    filter((cycleway == "4" | cycleway == "5") & highway_order < 16) %>%
     dplyr::select(uid) %>% unlist() %>% as.double()
   # changing merged bikepaths to regular bikepaths
   bikepaths <- links %>% filter(uid %in% bikepath_uids) %>%
-    mutate(highway_order=15) %>%
+    mutate(highway_order = 16) %>%
     mutate(freespeed=defaults_df$freespeed[15]) %>%
     mutate(laneCapacity=defaults_df$laneCapacity[15]) %>%
     mutate(is_car=0) %>%

From 1dc4fec35af79cc58bf44d7e5f2b43ca4173a05c Mon Sep 17 00:00:00 2001
From: Alan
Date: Mon, 5 Feb 2024 16:02:45 +1100
Subject: [PATCH 061/103] fixed the issue where some line-segments overlapped,
 causing their intersections to be lines, which would create gaps in the
 network.
This retains only their endpoints, which removes these gaps --- network.sql | 275 ++++++++++++++++++++++++++++++++++++++++++++++++++ processOSM.sh | 90 +++++++++++++++++ 2 files changed, 365 insertions(+) create mode 100644 network.sql create mode 100644 processOSM.sh diff --git a/network.sql b/network.sql new file mode 100644 index 0000000..24d8bf1 --- /dev/null +++ b/network.sql @@ -0,0 +1,275 @@ +-- turning timing on +\timing +-- transforms the geometries to a projected (i.e, x,y) system and snaps to the +-- nearest metre. Was using GDA2020 (7845), now using MGA Zone 55 (28355) + +-- in case the road has multi-lineStrings +DROP TABLE IF EXISTS roads_split; +CREATE TABLE roads_split AS +SELECT ogc_fid, osm_id, highway, other_tags, + (ST_Dump(geom)).geom AS geom +FROM roads; + + +ALTER TABLE roads_split + ALTER COLUMN geom TYPE geometry(LineString,:v1) + USING ST_SnapToGrid(ST_Transform(geom,:v1),1); +ALTER TABLE roads_points + ALTER COLUMN geom TYPE geometry(Point,:v1) + USING ST_SnapToGrid(ST_Transform(geom,:v1),1); +CREATE INDEX roads_points_gix ON roads USING GIST (geom); + +-- determine if the road segment is a bridge or tunnel +ALTER TABLE roads ADD COLUMN bridge_or_tunnel BOOLEAN; +UPDATE roads + SET bridge_or_tunnel = + CASE WHEN other_tags LIKE '%bridge%' OR other_tags LIKE '%tunnel%' THEN TRUE + ELSE FALSE END; +CREATE INDEX roads_gix ON roads USING GIST (geom); + +-- find the bridge-bridge or road-road intersections +DROP TABLE IF EXISTS line_intersections; +CREATE TABLE line_intersections AS +SELECT a.osm_id AS osm_id_a, b.osm_id AS osm_id_b, + ST_Intersection(a.geom,b.geom) AS geom +FROM roads AS a, roads AS b +WHERE a.osm_id < b.osm_id AND + a.bridge_or_tunnel = b.bridge_or_tunnel AND + ST_Intersects(a.geom, b.geom) = TRUE; + + +-- fixing the cases where the line intersections end up as edges. Here we +-- store their individual endpoints +DROP TABLE IF EXISTS line_intersections2; +CREATE TABLE line_intersections2 AS + SELECT osm_id_a, osm_id_b, geom + FROM line_intersections + WHERE ST_GeometryType(geom) = 'ST_Point' +UNION + SELECT osm_id_a, osm_id_b, ST_StartPoint(geom) AS geom + FROM line_intersections + WHERE ST_GeometryType(geom) = 'ST_LineString' +UNION + SELECT osm_id_a, osm_id_b, ST_EndPoint(geom) AS geom + FROM line_intersections + WHERE ST_GeometryType(geom) = 'ST_LineString'; + + + +-- group the intersections by osm_id +DROP TABLE IF EXISTS line_intersections_grouped; +CREATE TABLE line_intersections_grouped AS +SELECT c.osm_id, st_unaryunion(st_collect(c.geom)) AS geom +FROM + (SELECT a.osm_id_a AS osm_id, a.geom + FROM line_intersections as a + UNION + SELECT b.osm_id_b AS osm_id, b.geom + FROM line_intersections AS b) AS c +GROUP BY osm_id; + +-- take the intersections, buffer them 0.01m, and use them to cut the lines they +-- intersect. We then snap to the nearest metre, ensuring there are no gaps. +-- Only intersections with the same osm_id need to be considered +DROP TABLE IF EXISTS line_cut; +CREATE TABLE line_cut AS +SELECT a.osm_id, +(ST_Dump(ST_SnapToGrid(ST_Difference(a.geom,ST_Buffer(b.geom,0.01)),1))).geom AS geom +FROM roads_split AS a, line_intersections_grouped AS b +WHERE a.osm_id = b.osm_id; + +-- all the osm_ids currently processed. Some segments don't have any +-- intersections so they will need to be added. 
Adding an index here to speedup +-- processing +DROP TABLE IF EXISTS unique_ids; +CREATE TABLE unique_ids AS +SELECT DISTINCT osm_id +FROM line_cut; +CREATE UNIQUE INDEX osm_id_idx ON unique_ids (osm_id); + +-- adding the remaining road segments +INSERT INTO line_cut +SELECT a.osm_id, a.geom +FROM roads_split AS a, + (SELECT osm_id + FROM roads_split + EXCEPT + SELECT osm_id + FROM unique_ids) AS b +WHERE a.osm_id = b.osm_id; +CREATE INDEX line_cut_gix ON line_cut USING GIST (geom); +ALTER TABLE line_cut ADD COLUMN lid SERIAL PRIMARY KEY; + +-- find all of the road segment endpoints, including the new ones we've added +-- from the intersections +DROP TABLE IF EXISTS endpoints; +CREATE TABLE endpoints AS +SELECT ST_StartPoint(a.geom) as geom +FROM line_cut as a +UNION +SELECT ST_EndPoint(b.geom) as geom +FROM line_cut as b; +CREATE INDEX endpoints_gix ON endpoints USING GIST (geom); + +-- cluster the endpoints to assign each unique endpoint location an id +DROP TABLE IF EXISTS endpoints_clustered; +CREATE TABLE endpoints_clustered AS +SELECT (ST_Dump(a.geom)).geom AS geom +FROM + (SELECT ST_Union(geom) AS geom + FROM endpoints) AS a; +ALTER TABLE endpoints_clustered ADD COLUMN id SERIAL PRIMARY KEY; +CREATE INDEX endpoints_clustered_gix ON endpoints_clustered USING GIST (geom); + +-- most lines will only have 2 endpoints near them, but some will have extra +DROP TABLE IF EXISTS endpoints_near_lines; +CREATE TABLE endpoints_near_lines AS +SELECT c.lid, c.num_endpoints, c.geom +FROM + (SELECT a.lid, COUNT(b.id) AS num_endpoints, + st_unaryunion(st_collect(b.geom)) AS geom + FROM + line_cut AS a, + endpoints_clustered AS b + WHERE + st_intersects(st_buffer(a.geom,0.1),b.geom) + GROUP BY + a.lid + ) AS c +WHERE + c.num_endpoints > 2; + +DROP TABLE IF EXISTS line_cut2; +CREATE TABLE line_cut2 AS +SELECT a.lid, a.osm_id, +(ST_Dump(ST_SnapToGrid(ST_Difference(a.geom,ST_Buffer(b.geom,0.01)),1))).geom AS geom +FROM line_cut AS a, endpoints_near_lines AS b +WHERE a.lid = b.lid; + +DELETE FROM line_cut +WHERE lid IN + (SELECT lid FROM line_cut2); + + +DROP TABLE IF EXISTS line_cut3; +CREATE TABLE line_cut3 AS + SELECT osm_id, geom + FROM line_cut + WHERE lid NOT IN (SELECT lid FROM line_cut2) +UNION + SELECT osm_id, geom + FROM line_cut2; + -- add length +ALTER TABLE line_cut3 ADD COLUMN length INTEGER; +UPDATE line_cut3 SET length = ST_Length(geom); + +-- add from and to id columns +ALTER TABLE line_cut3 ADD COLUMN from_id INTEGER; +ALTER TABLE line_cut3 ADD COLUMN to_id INTEGER; + +-- assign the from and to ids to the road segments +UPDATE line_cut3 AS a + SET from_id = b.id +FROM + endpoints_clustered as b +WHERE + ST_Intersects(ST_StartPoint(a.geom),b.geom); + +UPDATE line_cut3 AS a + SET to_id = b.id +FROM + endpoints_clustered as b +WHERE + ST_Intersects(ST_EndPoint(a.geom),b.geom); + +DELETE FROM line_cut3 +WHERE ST_isEmpty(geom); + + +-- This doesn't seem to be necessary, but will keep this in in case we do need +-- it for other networks. It finds all the unique ids used by the line_cut3 +-- table and builds an index on it. 
+DROP TABLE IF EXISTS unique_node_ids; +CREATE TABLE unique_node_ids AS +SELECT DISTINCT c.id +FROM + (SELECT DISTINCT a.from_id AS id + FROM line_cut3 AS a + UNION + SELECT DISTINCT b.to_id AS id + FROM line_cut3 AS b) as c; +CREATE UNIQUE INDEX unique_node_ids_idx ON unique_node_ids (id); + +-- filters endpoints_clustered to only have the nodes used in line_cut3 +DROP TABLE IF EXISTS endpoints_filtered; +CREATE TABLE endpoints_filtered AS +SELECT a.id, a.geom +FROM endpoints_clustered AS a, + unique_node_ids AS b +WHERE + a.id = b.id; +CREATE INDEX endpoints_filtered_gix ON endpoints_filtered USING GIST (geom); + +-- the non-spatial data for the osm_id entries present in the network +DROP TABLE IF EXISTS osm_metadata; +CREATE TABLE osm_metadata AS +SELECT osm_id, highway, other_tags +FROM roads +WHERE osm_id IN + (SELECT DISTINCT osm_id FROM line_cut3); + +-- from and to ids of edges that are roundabouts +DROP TABLE IF EXISTS edges_roundabout; +CREATE TABLE edges_roundabout AS +SELECT a.from_id, + a.to_id, + CASE WHEN b.other_tags LIKE '%roundabout%' THEN TRUE + ELSE FALSE + END AS is_roundabout +FROM + line_cut3 AS a, + osm_metadata AS b +WHERE + a.osm_id = b.osm_id; + +-- node ids that connect to roundabouts +DROP TABLE IF EXISTS nodes_roundabout; +CREATE TABLE nodes_roundabout AS +SELECT DISTINCT c.id +FROM + (SELECT DISTINCT a.from_id AS id + FROM edges_roundabout AS a + WHERE is_roundabout = TRUE + UNION + SELECT DISTINCT b.to_id AS id + FROM edges_roundabout AS b + WHERE is_roundabout = TRUE) AS c; + +-- nodes attributed with if they are at roundabouts or traffic signals +DROP TABLE IF EXISTS nodes_attributed; +CREATE TABLE nodes_attributed AS +SELECT c.id, + CASE WHEN c.id IN (SELECT id FROM nodes_roundabout) THEN 1 + ELSE 0 + END AS is_roundabout, + CASE WHEN c.length <= 20 THEN 1 + ELSE 0 + END AS is_signal, + c.geom +FROM + (SELECT + a.id, + a.geom, + ST_Distance(a.geom,b.geom) AS length + FROM + endpoints_filtered AS a + CROSS JOIN LATERAL + (SELECT geom + FROM roads_points + ORDER BY + a.geom <-> geom + LIMIT 1) AS b + ) AS c; + +CREATE INDEX nodes_attributed_gix ON nodes_attributed USING GIST (geom); + diff --git a/processOSM.sh b/processOSM.sh new file mode 100644 index 0000000..b5f0f4c --- /dev/null +++ b/processOSM.sh @@ -0,0 +1,90 @@ +#!/bin/bash + +extract=$1 +crs=$2 +output=$3 + +#extract='./data/melbourne_australia.osm' +#crs=28355 +#output='./data/network3.sqlite' + +# change to the directory this script is located in +cd "$(dirname "$0")" +# extract the roads from the osm file, put in temp.sqlite +ogr2ogr -update -overwrite -nln roads -f "SQLite" -dsco SPATIALITE=YES \ + -dialect SQLite -sql \ + "SELECT CAST(osm_id AS DOUBLE PRECISION) AS osm_id, highway, other_tags, \ + GEOMETRY FROM lines \ + WHERE (highway IS NOT NULL AND \ + NOT highway = 'bridleway' AND \ + NOT highway = 'bus_stop' AND \ + NOT highway = 'co' AND \ + NOT highway = 'platform' AND \ + NOT highway = 'raceway' AND \ + NOT highway = 'services' AND \ + NOT highway = 'traffic_island' AND \ + highway NOT LIKE '%construction%' AND \ + highway NOT LIKE '%proposed%' AND \ + highway NOT LIKE '%disused%' AND \ + highway NOT LIKE '%abandoned%') AND \ + (other_tags IS NULL OR + (other_tags NOT LIKE '%busbar%' AND \ + other_tags NOT LIKE '%abandoned%' AND \ + other_tags NOT LIKE '%\"access\"=>\"private\"%')) " \ + ./data/temp.sqlite $extract +# highway NOT LIKE '%service%' AND \ +# Removed since some service roads are used as footpaths (e.g., Royal Exhibition +# building) + +# bridleway can be used for walking 
and cycling (provided you give way to horses +# they are more common in the UK. + +# extract the traffic signals, put in temp.sqlite +ogr2ogr -update -overwrite -nln roads_points -f "SQLite" -dsco SPATIALITE=YES \ + -dialect SQLite -sql \ + "SELECT CAST(osm_id AS DOUBLE PRECISION) AS osm_id, highway, other_tags, \ + GEOMETRY FROM points \ + WHERE highway LIKE '%traffic_signals%' " \ + ./data/temp.sqlite $extract + +# extract the train and tram lines and add to temp.sqlite +# apparently there are miniature railways +ogr2ogr -update -overwrite -nln pt -f "SQLite" -dialect SQLite -sql \ + "SELECT CAST(osm_id AS DOUBLE PRECISION) AS osm_id, highway, other_tags, \ + GEOMETRY FROM lines \ + WHERE other_tags LIKE '%railway%' AND \ + other_tags NOT LIKE '%busbar%' AND \ + other_tags NOT LIKE '%abandoned%' AND \ + other_tags NOT LIKE '%miniature%' AND \ + other_tags NOT LIKE '%proposed%' AND \ + other_tags NOT LIKE '%disused%' AND \ + other_tags NOT LIKE '%preserved%' AND \ + other_tags NOT LIKE '%construction%' AND \ + other_tags NOT LIKE '%\"service\"=>\"yard\"%'" \ + ./data/temp.sqlite $extract + +# the postgres database name. +DB_NAME="network_test" + +# Delete the database if it already exists +COMMAND="psql -U postgres -c 'DROP DATABASE ${DB_NAME}' postgres" +eval $COMMAND +# Create the database and add the postgis extension +createdb -U postgres ${DB_NAME} +psql -c 'create extension postgis' ${DB_NAME} postgres + +ogr2ogr -overwrite -lco GEOMETRY_NAME=geom -lco SCHEMA=public -f "PostgreSQL" \ + PG:"host=localhost port=5432 user=postgres dbname=${DB_NAME}" \ + -a_srs "EPSG:4326" ./data/temp.sqlite roads +ogr2ogr -overwrite -lco GEOMETRY_NAME=geom -lco SCHEMA=public -f "PostgreSQL" \ + PG:"host=localhost port=5432 user=postgres dbname=${DB_NAME}" \ + -a_srs "EPSG:4326" ./data/temp.sqlite roads_points + +# run the sql statements +psql -U postgres -d ${DB_NAME} -a -f network.sql -v v1="$crs" + +# extract the nodes, edges, and osm metadata to the network file +ogr2ogr -update -overwrite -f SQLite -dsco SPATIALITE=yes $output PG:"dbname=${DB_NAME} user=postgres" public.line_cut3 -nln edges +ogr2ogr -update -overwrite -f SQLite -update $output PG:"dbname=${DB_NAME} user=postgres" public.nodes_attributed -nln nodes +ogr2ogr -update -overwrite -f SQLite -update $output PG:"dbname=${DB_NAME} user=postgres" public.osm_metadata -nln osm_metadata + From a753aeca3e3d8e46956a259b889b3bda09355a40 Mon Sep 17 00:00:00 2001 From: StevePem Date: Tue, 6 Feb 2024 09:09:23 +1100 Subject: [PATCH 062/103] improve treatment of intersections used to split paths by keeping line endpoints --- functions/processOsm.R | 55 ++++++++++++++++++++++++++++++------------ 1 file changed, 39 insertions(+), 16 deletions(-) diff --git a/functions/processOsm.R b/functions/processOsm.R index 2aa30b5..d622889 100644 --- a/functions/processOsm.R +++ b/functions/processOsm.R @@ -2,8 +2,8 @@ processOsm <- function(osmGpkg, outputCrs) { - # osmGpkg = "../data/processed/bendigo_osm.gpkg" - # osmGpkg = "../data/processed/melbourne_osm.gpkg" + # osmGpkg = "./data/bendigo_osm.gpkg" + # osmGpkg = "./data/melbourne_osm.gpkg" # outputCrs = 7899 # read in OSM data @@ -54,25 +54,48 @@ processOsm <- function(osmGpkg, outputCrs) { # find intersections, but excluding any on different levels as # determined by bridge_tunnel - - # intersection points - intersect paths with a copy of itself, select points, - # keep where bridge/tunnel matches echo("Finding path intersections\n") - system.time( - intersections <- paths %>% - # intersect with itself - 
produces a separate point for each pair of links - # that intersect at an intersection (and also produces line intersections) - st_intersection(., paths %>% dplyr::select(bridge_tunnel_a = bridge_tunnel)) %>% - # keep just the points + + # intersect paths with a copy of itself - produces point for each pair of links + # that intersect at an intersection (and also produces line intersections) + intersection.base <- paths %>% + st_intersection(., paths %>% + dplyr::select(osm_id_a = osm_id, + bridge_tunnel_a = bridge_tunnel)) + + # point intersections that are on the same level + intersection.points <- intersection.base %>% + # keep just the points where bridge_tunnel match (at grade, both bridge or both tunnel) st_collection_extract("POINT") %>% - # only keep where bridge_tunnel match (at grade, both bridge or both tunnel) - filter(bridge_tunnel == bridge_tunnel_a) %>% - # combine where same location with same osm_id + filter(bridge_tunnel == bridge_tunnel_a) + + # line intersections that are on the same level, and that are not the same osm_id + # - that is, keep where there are two overlapping lines in osm with separate osm_id's + intersection.lines <- intersection.base %>% + st_collection_extract("LINESTRING") %>% + filter(bridge_tunnel == bridge_tunnel_a & + osm_id != osm_id_a) + + # combine the point intersections and the line start/end points, with the + # osm_ids of their paths (used for splitting the lines) + intersections <- bind_rows( + # points + intersection.points, + # line start points (with the osm_id's of their paths) + intersection.lines %>% + st_startpoint() %>% st_sf() %>% st_set_geometry("geom") %>% + cbind(osm_id = intersection.lines$osm_id), + # line end points (with the osm_id's of their paths) + intersection.lines %>% + st_endpoint() %>% st_sf() %>% st_set_geometry("geom") %>% + cbind(osm_id = intersection.lines$osm_id) + ) %>% + # combine where same location with same osm_id group_by(osm_id, geom) %>% summarise() %>% ungroup() - ) - + + # temp dev notes (SP): # (1) compared to network.sql, this only places intersections where both are # bridges, both are tunnels, or both are neither (whereas network.sql, From afbb1527fbc096e079229c8b1ac02441c87e9f43 Mon Sep 17 00:00:00 2001 From: Alan Date: Tue, 6 Feb 2024 09:51:39 +1100 Subject: [PATCH 063/103] Forgot to save the network.sql file. 
This one will work --- network.sql | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/network.sql b/network.sql index 24d8bf1..355c929 100644 --- a/network.sql +++ b/network.sql @@ -17,22 +17,22 @@ ALTER TABLE roads_split ALTER TABLE roads_points ALTER COLUMN geom TYPE geometry(Point,:v1) USING ST_SnapToGrid(ST_Transform(geom,:v1),1); -CREATE INDEX roads_points_gix ON roads USING GIST (geom); +CREATE INDEX roads_points_gix ON roads_points USING GIST (geom); -- determine if the road segment is a bridge or tunnel -ALTER TABLE roads ADD COLUMN bridge_or_tunnel BOOLEAN; -UPDATE roads +ALTER TABLE roads_split ADD COLUMN bridge_or_tunnel BOOLEAN; +UPDATE roads_split SET bridge_or_tunnel = CASE WHEN other_tags LIKE '%bridge%' OR other_tags LIKE '%tunnel%' THEN TRUE ELSE FALSE END; -CREATE INDEX roads_gix ON roads USING GIST (geom); +CREATE INDEX roads_split_gix ON roads_split USING GIST (geom); -- find the bridge-bridge or road-road intersections DROP TABLE IF EXISTS line_intersections; CREATE TABLE line_intersections AS SELECT a.osm_id AS osm_id_a, b.osm_id AS osm_id_b, ST_Intersection(a.geom,b.geom) AS geom -FROM roads AS a, roads AS b +FROM roads_split AS a, roads_split AS b WHERE a.osm_id < b.osm_id AND a.bridge_or_tunnel = b.bridge_or_tunnel AND ST_Intersects(a.geom, b.geom) = TRUE; @@ -62,10 +62,10 @@ CREATE TABLE line_intersections_grouped AS SELECT c.osm_id, st_unaryunion(st_collect(c.geom)) AS geom FROM (SELECT a.osm_id_a AS osm_id, a.geom - FROM line_intersections as a + FROM line_intersections2 as a UNION SELECT b.osm_id_b AS osm_id, b.geom - FROM line_intersections AS b) AS c + FROM line_intersections2 AS b) AS c GROUP BY osm_id; -- take the intersections, buffer them 0.01m, and use them to cut the lines they From c619f390ba89fb5b628a1f77eb0a5c780dee0752 Mon Sep 17 00:00:00 2001 From: StevePem Date: Wed, 7 Feb 2024 17:51:02 +1100 Subject: [PATCH 064/103] Separate input/output parameters into 'data' and 'output' directories --- NetworkGenerator.R | 8 ++++---- functions/addDestinations.R | 4 ++-- functions/getOsmExtract.R | 4 ++-- functions/processOsm.R | 4 ++-- 4 files changed, 10 insertions(+), 10 deletions(-) diff --git a/NetworkGenerator.R b/NetworkGenerator.R index 9198338..b621992 100644 --- a/NetworkGenerator.R +++ b/NetworkGenerator.R @@ -29,8 +29,8 @@ makeNetwork<-function(city, outputFileName = "test"){ if (city == "Bendigo") { region = "./data/greater_bendigo.sqlite" outputCrs = 7899 - osmGpkg = "./data/bendigo_osm.gpkg" - unconfiguredSqlite = "./data/bendigo_network_unconfigured.sqlite" + osmGpkg = "./output/bendigo_osm.gpkg" + unconfiguredSqlite = "./output/bendigo_network_unconfigured.sqlite" cropAreaPoly = "" # must set 'crop2Area=F' # demFile = "./data/5m_DEM_reprojected.tif" # MIGHT NOT BE FINAL FILE # ndviFile = "" # must set 'addNDVI=F' @@ -39,8 +39,8 @@ makeNetwork<-function(city, outputFileName = "test"){ } else if (city == "Melbourne") { region = "./data/greater_melbourne.sqlite" outputCrs = 7899 - osmGpkg = "./data/melbourne_osm.gpkg" - unconfiguredSqlite = "./data/melbourne_network_unconfigured.sqlite" + osmGpkg = "./output/melbourne_osm.gpkg" + unconfiguredSqlite = "./output/melbourne_network_unconfigured.sqlite" cropAreaPoly = "city-of-melbourne_victoria" # demFile = "./data/DEM_melbourne.tif" # ndviFile = "./data/NDVI_1600mBuffer_Melbourne_reprojected.tif" diff --git a/functions/addDestinations.R b/functions/addDestinations.R index af6d847..40bd573 100644 --- a/functions/addDestinations.R +++ b/functions/addDestinations.R @@ -14,9 
+14,9 @@ addDestinations <- function(nodes_current, # nodes_current = networkDensified[[1]] # edges_current = networkDensified[[2]] - # osmGpkg = "../data/processed/melbourne_osm.gpkg" + # osmGpkg = "./data/melbourne_osm.gpkg" # city = "Melbourne" - # gtfs_feed = "../data/processed/gtfs.zip" + # gtfs_feed = "./data/gtfs.zip" # outputCrs = 28355 # # check keys diff --git a/functions/getOsmExtract.R b/functions/getOsmExtract.R index e8effa7..f7f8e89 100644 --- a/functions/getOsmExtract.R +++ b/functions/getOsmExtract.R @@ -6,10 +6,10 @@ getOsmExtract <- function(region, osmGpkg, retainDownload) { - # region = "../data/processed/greater_bendigo.sqlite" + # region = "../data/greater_bendigo.sqlite" # outputCrs = 7899 # regionBufferDist = 10000 # 10km - # osmGpkg = "../data/processed/bendigo_osm.gpkg" + # osmGpkg = "../output/bendigo_osm.gpkg" # load region and buffer by selected distance (eg 10km) region <- st_read(region) diff --git a/functions/processOsm.R b/functions/processOsm.R index d622889..f0f51b1 100644 --- a/functions/processOsm.R +++ b/functions/processOsm.R @@ -2,8 +2,8 @@ processOsm <- function(osmGpkg, outputCrs) { - # osmGpkg = "./data/bendigo_osm.gpkg" - # osmGpkg = "./data/melbourne_osm.gpkg" + # osmGpkg = "./output/bendigo_osm.gpkg" + # osmGpkg = "./output/melbourne_osm.gpkg" # outputCrs = 7899 # read in OSM data From dbc5987b10df64a0c97425909c2c7e84692e1919 Mon Sep 17 00:00:00 2001 From: StevePem Date: Wed, 7 Feb 2024 17:56:55 +1100 Subject: [PATCH 065/103] tidy location and logging for output writing --- NetworkGenerator.R | 19 +++++++++++-------- 1 file changed, 11 insertions(+), 8 deletions(-) diff --git a/NetworkGenerator.R b/NetworkGenerator.R index b621992..f802515 100644 --- a/NetworkGenerator.R +++ b/NetworkGenerator.R @@ -1,8 +1,8 @@ -makeNetwork<-function(city, outputFileName = "test"){ +makeNetwork<-function(city, outputSubdirectory = "generated_network"){ # city = "Bendigo" # city = "Melbourne" - # outputFileName = "network" + # outputSubdirectory = "generated_network" # Parameters -------------------------------------------------------------- @@ -99,10 +99,10 @@ makeNetwork<-function(city, outputFileName = "test"){ analysis_end = as.Date("2019-10-17","%Y-%m-%d") # Transit Feed end date # Outputs - # outputFileName=format(Sys.time(),"%d%b%y_%H%M") # date_hour, eg. "17Aug21_1308" - if(exists("outputFileName")){ - outputFileName=outputFileName - }else{outputFileName="test"} + # outputSubdirectory=format(Sys.time(),"%d%b%y_%H%M") # date_hour, eg. 
"17Aug21_1308" + if(exists("outputSubdirectory")){ + outputSubdirectory=outputSubdirectory + } else {outputSubdirectory="generated_network"} writeXml=F writeShp=F writeSqlite=T @@ -128,8 +128,8 @@ makeNetwork<-function(city, outputFileName = "test"){ # Building the output folder structure ------------------------------------ - outputDir <- paste0("output/",outputFileName) - if(dir.exists(outputDir)) dir_delete(outputDir) + outputDir <- paste0("output/",outputSubdirectory) + if(outputSubdirectory != "" & dir.exists(outputDir)) dir_delete(outputDir) dir_create(paste0('./',outputDir)) sink(paste0('./',outputDir,'/makeMatsimNetwork.log'), append=FALSE, split=TRUE) if (addGtfs) dir_create(paste0(outputDir,"/gtfs")) @@ -364,6 +364,9 @@ makeNetwork<-function(city, outputFileName = "test"){ if(writeSqlite) system.time(exportSQlite(networkFinal, outputDir, outputCrs)) if(writeShp) system.time(exportShp(networkFinal, outputDir, outputCrs)) if(writeXml) system.time(exportXML(networkFinal, outputDir)) + + # end logging + sink() } ## JUST FOR TESTING From c19d673b0703772d95e45dd12c432ef0ae81cd0b Mon Sep 17 00:00:00 2001 From: StevePem Date: Wed, 7 Feb 2024 17:59:26 +1100 Subject: [PATCH 066/103] Update readme documents for new data organisation and preparation --- README.md | 23 +++++----- data/README.md | 67 ++++++++++++++--------------- data/data prep tools.R | 97 ++++++++++++++++++++++++++++++++++++++++++ data/prepare.sh | 56 ------------------------ 4 files changed, 143 insertions(+), 100 deletions(-) create mode 100644 data/data prep tools.R delete mode 100755 data/prepare.sh diff --git a/README.md b/README.md index b5592e1..c55cbc1 100644 --- a/README.md +++ b/README.md @@ -26,16 +26,19 @@ Adjustable parameters are listed under the Parameters sub-heading. Running the algorithm requires an input parameter 'city', and adjustable parameters must be completed for that city, specifying locations of relevant input files and the applicable CRS. If running for a location for which 'city' parameters have not already been defined, then these must be added, using existing city parameters as a template. The city parameters are as follows. -* region - required if OSM extract is to be downloaded, or destinations (see below) are to be extracted. This must be the location of a file in sqlite format which defines the boundary of the area for which the OSM extract is required. -* outputCrs - specify the appropriate EPSG coordinate reference system number for the region. -* osmGpkg - the location to which an OSM extract in .gpkg format will be saved, or where an existing .gpkg file is stored if already held. -* unconfiguredSqlite - the location to which an unconfigured network in .sqlite format will be saved, or where an existing unconfigured network is stored if already held. -* cropAreaPoly - an optional parameter for cropping the OSM extract to a smaller test area. -* demFile - required if 'addElevation' is set to 'T'. This must be the location of a digital elevation model raster file in the same CRS as the the network. -* ndviFile - required if 'addNDVI' is set to 'T'. This must be the location of a raster file with NDVI values in the same CRS as the network. -* gtfs_feed - required if 'addGtfs' or 'addDestinationLayer' is set to 'T'. This must be the location of a zip file containing GTFS data. - -The parameters assume that the relevant files are stored in the a 'data' subdirectory. 
+
+| Parameter | Parameter type | Requirements |
+|--------------------|----------------|-----------------------------------------|
+| region | .sqlite file | Required if OSM extract is to be downloaded, or destinations (see below) are to be extracted. This must be the location of a file in sqlite format which defines the boundary of the area for which the OSM extract is required.|
+| outputCrs | CRS | Specify the appropriate EPSG coordinate reference system number for the region.|
+| osmGpkg | .gpkg file | The location to which an OSM extract in .gpkg format will be saved, or where an existing .gpkg file is stored if already held.|
+| unconfiguredSqlite | .sqlite file | The location to which an unconfigured network in .sqlite format will be saved, or where an existing unconfigured network is stored if already held.|
+| cropAreaPoly | area location | An optional parameter for cropping the OSM extract to a smaller test area. See https://github.com/JamesChevalier/cities/tree/master/australia/victoria for available locations. |
+| demFile | .tif file | Required if 'addElevation' is set to 'T'. This must be the location of a digital elevation model raster file in the same CRS as the network. |
+| ndviFile | .tif file | Required if 'addNDVI' is set to 'T'. This must be the location of a raster file with NDVI values in the same CRS as the network. |
+| gtfs_feed | gtfs .zip file | Required if 'addGtfs' or 'addDestinationLayer' is set to 'T'. This must be the location of a zip file containing GTFS data. |
+
+**The parameters assume that the region file and (if used) the demFile, ndviFile and gtfs_feed are stored in the 'data' subdirectory.**
 See `data/README.md` for more detail on obtaining the data files.
 
 The algorithm will do the following:
 * if 'downloadOsm' is set to 'T', download an OSM extract for the selected 'region' and save it as a .gpkg file.
diff --git a/data/README.md b/data/README.md
index 3436baf..440c8aa 100644
--- a/data/README.md
+++ b/data/README.md
@@ -1,37 +1,36 @@
-# Update required
+# Network generation data
 
-The information below requires update, once final data location and structure is determined.
+This directory contains input files required to generate MATSim networks for Bendigo and Melbourne, which can be found [here](https://osf.io/ajycn/).
 
+## Files to download
+
+Download the following files for the relevant network location.
+
+### Bendigo
+| File | Description |
+|--------------------------|---------------------------------------------------|
+| greater_bendigo.sqlite | Boundary of the Greater Bendigo Local Government Area |
+| [to come] | Digital elevation model data for the Greater Bendigo area |
+| [to come] | [NDVI] data for the Greater Bendigo area |
+| gtfs.zip | GTFS feed for Victoria as at 20 October 2023 |
+
+
+### Melbourne
+| File | Description |
+|--------------------------|---------------------------------------------------|
+| greater_melbourne.sqlite | Boundary of the Greater Melbourne Greater Capital City Statistical Area |
+| [to come] | Digital elevation model data for the Greater Melbourne area |
+| [to come] | [NDVI] data for the Greater Melbourne area |
+| gtfs.zip | GTFS feed for Victoria as at 20 October 2023 |
+
+
+## Other files
+
+The directory also contains the following other files, from which the region boundary [and DEM] files above were created, using the code contained in `data/data prep tools.R`. 
+ +| File | Description | +|---------------------------------|--------------------------------------------| +| LGAs.zip | Local government areas of Victoria (VICMAP) | +| GCCSA_2021_AUST_SHP_GDA2020.zip | Greater capital city statistical areas (ABS) | +| [to come] | Digital elevation model data for Victoria | -This directory contains inputs files required to generate a MATSim network for Melbourne, which can be found [here](https://cloudstor.aarnet.edu.au/plus/s/ssLkX8Uez64rV3D). Alternatively, you can use the `./prepare.sh` command as described below to download the data you need. - -## How to populate - -To populate this directory with the required data files, use the `./prepare.sh` with the relevant arguments. Valid arguments and their descriptions are presented in the table below: - -| Argument | Input file | Description | -|----------|------------------------------|-----------------------------------------------| -| -region | studyRegion.sqlite | Greater Melbourne region | -| -osm19 | melbourne.osm | Raw OSM file for Melbourne, 2019 | -| -melb | melbourne.sqlite | Road attributes | -| -net | network.sqlite | non-planar edges and nodes | -| -gtfs19 | gtfs_au_vic_ptv_20191004.zip | GTFS feed - 2019-10-04 | -| -demx10 | DEMx10EPSG28355.tif | Digital Elevation Model data (x10, EPSG28355) | -| -A | all of the above | It Will download all the input files, (~1.2gb)| - -As an example, to start from processing raw OSM (step 1), and generating a network without elevation and public transport, you need to run the following to get the required input: -``` -./prepare.sh -osm19 -``` - -Alternatively, if you want to skip processing raw OSM and start directly from `makeNetwork.sh`, and generate a network that has elevation and PT network from GTFS, you need to run the following to download required inputs: -``` -./prepare.sh -melb -net -gtfs19 -demx10 -``` -If you are not sure about which inputs are required, just simply run the following to download all the inputs: -``` -./prepare.sh -A -``` - -If any issues with the script, please download each required file directly. 
diff --git a/data/data prep tools.R b/data/data prep tools.R new file mode 100644 index 0000000..72db431 --- /dev/null +++ b/data/data prep tools.R @@ -0,0 +1,97 @@ +# Tools for creating input data files for use in NetworkGenerator +# Currently these tools are applicable for Victoria only + +library(tidyverse) +library(sf) +library(terra) + +# 1 Region boundaries from LGA or GCCSA files ---- +# -----------------------------------------------------------------------------# +# function for extracting specific region from an administrative districts file +getRegion <- function(input.file, input.field, input.name, + output.filename, outputCrs) { + + input.transformed <- input.file %>% + st_transform(outputCrs) + + output <- input.transformed %>% + filter(.data[[input.field]] == input.name) + + st_write(output, paste0("./data/", output.filename, ".sqlite")) +} + +# function to extract zipped shapefile +# Note: 'subpath' is the string between the top zipped file and the ultimate file, eg "/gda2020_vicgrid/esrishape/whole_of_dataset/victoria/VMTRANS" +# 'file' not needed for files that don't have layers (eg shapefiles) if there is only one in the directory +# use 'file' (rather than 'layer') for shapefiles and mapinfo files; use both for gpkg and sqlite +read_zipped_GIS <- function(zipfile, subpath = "", file = NULL, layer = NULL) { + temp <- tempfile() + unzip(zipfile, exdir = temp) + if (is.null(layer)) { + st_read(paste0(temp, subpath, file)) + } else { + st_read(paste0(temp, subpath, file), layer) + } +} + +# Greater Bendigo - from LGAs +getRegion(input.file = read_zipped_GIS(zipfile = "./data/LGAs.zip", + subpath = "/gda2020_vicgrid/esrishape/whole_of_dataset/victoria/VMADMIN"), + input.field = "NAME", input.name = "GREATER BENDIGO", + output.filename = "greater_bendigo", outputCrs = 7899) + + +# Greater Melbourne - from GCCSA +getRegion(input.file = read_zipped_GIS(zipfile = "./data/GCCSA_2021_AUST_SHP_GDA2020.zip"), + input.field = "GCC_NAME21", input.name = "Greater Melbourne", + output.filename = "greater_melbourne", outputCrs = 7899) + + + +# 2 Elevation from whole of state file ---- +# -----------------------------------------------------------------------------# +# function for extracting region's elevation from whole of state file +getRegionDem <- function(dem.location, region.location, + output.filename, outputCrs) { + dem <- rast(dem.location) %>% + project(., outputCrs) + + region <- st_read(region.location) + + dem.cropped <- terra::crop(x = dem, y = region %>% st_buffer(1)) + + writeRaster(dem.cropped, paste0("./data/", output.filename, ".tif"), + gdal = "COMPRESS = DEFLATE", overwrite = TRUE) +} + +# Bendigo +getRegionDem(dem.location = "./data/vmelev_dem10m_ESRI_grid_GDA94_VicGrid/vmelev_dem10m_ESRI_grid_GDA94_Vicgrid/vmelev_dem10m/dem10m/hdr.adf", + region.location = "./data/greater_bendigo.sqlite", + output.filename = "DEM_bendigo", outputCrs = 7899) + +# Melbourne +getRegionDem(dem.location = "./data/vmelev_dem10m_ESRI_grid_GDA94_VicGrid/vmelev_dem10m_ESRI_grid_GDA94_Vicgrid/vmelev_dem10m/dem10m/hdr.adf", + region.location = "./data/greater_melbourne.sqlite", + output.filename = "DEM_melbourne", outputCrs = 7899) + + + +# from other file +dem <- rast("./data/vmelev_dem10m_ESRI_grid_GDA94_VicGrid/vmelev_dem10m_ESRI_grid_GDA94_Vicgrid/vmelev_dem10m/dem10m/hdr.adf") %>% + # transform to project crs + project(., "EPSG:28355") + +# load network +network <- st_read("./data/melbourne_network_unconfigured.sqlite") + +# crop dem to network (will crop to bounding box) +dem.network <- 
terra::crop(x = dem, y = network %>% st_buffer(1)) + +# save output +writeRaster(dem.network, + "./data/DEM_melbourne.tif", + gdal = "COMPRESS = DEFLATE", + overwrite = TRUE) + + + diff --git a/data/prepare.sh b/data/prepare.sh deleted file mode 100755 index df0f454..0000000 --- a/data/prepare.sh +++ /dev/null @@ -1,56 +0,0 @@ -#!/usr/bin/env bash -DIR=$(dirname "$0") - -declare -a FILES -isTest=false -while test $# -gt 0 -do - case "$1" in - -region) FILES+=("studyRegion.osm") - ;; - -osm19) FILES+=("melbourne.osm") - ;; - -melb) FILES+=("melbourne.sqlite") - ;; - -net) FILES+=("network.sqlite") - ;; - -gtfs19) FILES+=("gtfs_au_vic_ptv_20191004.zip") - ;; - -demx10) FILES+=("DEMx10EPSG28355.tif") - ;; - -test) isTest=true - ;; - -A) FILES=( - "studyRegion.sqlite" - "melbourne.osm" - "melbourne.sqlite" - "network.sqlite" - "gtfs_au_vic_ptv_20191004.zip" - "DEMx10EPSG28355.tif") - ;; - --*) echo "bad option $1" - ;; - *) echo "Unkown option $1" - ;; - esac - shift -done - -echo "Downlaoding ${FILES[@]} ..." - -for file in ${FILES[*]}; do - from="https://cloudstor.aarnet.edu.au/plus/s/rLTlQJDRixhyan9/download?path=%2F&files=$file" - to="$DIR/$file" - if [ ! -f "$to" ] ; then - CMD="wget -O \"$to\" \"$from\""; echo "$CMD" && eval "$CMD" - else - echo "Found $to so will use it" - fi -done - -if $isTest; then - echo 'dowloading the test area' - from="https://cloudstor.aarnet.edu.au/plus/s/FzDTSZy6t8PBJ2N/download" - to="$DIR/melbourne.osm" - CMD="wget -O \"$to\" \"$from\""; echo "$CMD" && eval "$CMD" -fi From 356aaa11f87f2a9c89c4065c617e61962299d130 Mon Sep 17 00:00:00 2001 From: StevePem Date: Wed, 7 Feb 2024 18:04:14 +1100 Subject: [PATCH 067/103] temporarily remove in-progress work on elevation --- data/data prep tools.R | 46 +----------------------------------------- 1 file changed, 1 insertion(+), 45 deletions(-) diff --git a/data/data prep tools.R b/data/data prep tools.R index 72db431..67dcc84 100644 --- a/data/data prep tools.R +++ b/data/data prep tools.R @@ -50,48 +50,4 @@ getRegion(input.file = read_zipped_GIS(zipfile = "./data/GCCSA_2021_AUST_SHP_GDA # 2 Elevation from whole of state file ---- # -----------------------------------------------------------------------------# -# function for extracting region's elevation from whole of state file -getRegionDem <- function(dem.location, region.location, - output.filename, outputCrs) { - dem <- rast(dem.location) %>% - project(., outputCrs) - - region <- st_read(region.location) - - dem.cropped <- terra::crop(x = dem, y = region %>% st_buffer(1)) - - writeRaster(dem.cropped, paste0("./data/", output.filename, ".tif"), - gdal = "COMPRESS = DEFLATE", overwrite = TRUE) -} - -# Bendigo -getRegionDem(dem.location = "./data/vmelev_dem10m_ESRI_grid_GDA94_VicGrid/vmelev_dem10m_ESRI_grid_GDA94_Vicgrid/vmelev_dem10m/dem10m/hdr.adf", - region.location = "./data/greater_bendigo.sqlite", - output.filename = "DEM_bendigo", outputCrs = 7899) - -# Melbourne -getRegionDem(dem.location = "./data/vmelev_dem10m_ESRI_grid_GDA94_VicGrid/vmelev_dem10m_ESRI_grid_GDA94_Vicgrid/vmelev_dem10m/dem10m/hdr.adf", - region.location = "./data/greater_melbourne.sqlite", - output.filename = "DEM_melbourne", outputCrs = 7899) - - - -# from other file -dem <- rast("./data/vmelev_dem10m_ESRI_grid_GDA94_VicGrid/vmelev_dem10m_ESRI_grid_GDA94_Vicgrid/vmelev_dem10m/dem10m/hdr.adf") %>% - # transform to project crs - project(., "EPSG:28355") - -# load network -network <- st_read("./data/melbourne_network_unconfigured.sqlite") - -# crop dem to network (will crop to bounding box) 
-dem.network <- terra::crop(x = dem, y = network %>% st_buffer(1)) - -# save output -writeRaster(dem.network, - "./data/DEM_melbourne.tif", - gdal = "COMPRESS = DEFLATE", - overwrite = TRUE) - - - +# [to come] From 03030d927000b87a68bd0586ccb09b51fc9b1d7d Mon Sep 17 00:00:00 2001 From: StevePem Date: Fri, 9 Feb 2024 08:40:58 +1100 Subject: [PATCH 068/103] add elevation file locations and set addElevation to true --- NetworkGenerator.R | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/NetworkGenerator.R b/NetworkGenerator.R index f802515..e74518f 100644 --- a/NetworkGenerator.R +++ b/NetworkGenerator.R @@ -32,7 +32,7 @@ makeNetwork<-function(city, outputSubdirectory = "generated_network"){ osmGpkg = "./output/bendigo_osm.gpkg" unconfiguredSqlite = "./output/bendigo_network_unconfigured.sqlite" cropAreaPoly = "" # must set 'crop2Area=F' - # demFile = "./data/5m_DEM_reprojected.tif" # MIGHT NOT BE FINAL FILE + demFile = "./data/dem_bendigo.tif" # ndviFile = "" # must set 'addNDVI=F' gtfs_feed = "./data/gtfs.zip" @@ -42,7 +42,7 @@ makeNetwork<-function(city, outputSubdirectory = "generated_network"){ osmGpkg = "./output/melbourne_osm.gpkg" unconfiguredSqlite = "./output/melbourne_network_unconfigured.sqlite" cropAreaPoly = "city-of-melbourne_victoria" - # demFile = "./data/DEM_melbourne.tif" + demFile = "./data/dem_melbourne.tif" # ndviFile = "./data/NDVI_1600mBuffer_Melbourne_reprojected.tif" gtfs_feed = "./data/gtfs.zip" @@ -80,7 +80,7 @@ makeNetwork<-function(city, outputSubdirectory = "generated_network"){ # ELEVATION # A flag for whether to add elevation or not - addElevation=F + addElevation=T ElevationMultiplier=1 # DESTINATIONS From 350cc398b22eb4a3e417001e9ce735a2a9788c0d Mon Sep 17 00:00:00 2001 From: StevePem Date: Fri, 9 Feb 2024 08:41:45 +1100 Subject: [PATCH 069/103] data information and preparation tool for elevation --- data/README.md | 18 +++++++++++------- data/data prep tools.R | 34 +++++++++++++++++++++++++++++++++- 2 files changed, 44 insertions(+), 8 deletions(-) diff --git a/data/README.md b/data/README.md index 440c8aa..17c34a9 100644 --- a/data/README.md +++ b/data/README.md @@ -10,7 +10,7 @@ Download the following files for the relevant network location. | File | Description | |--------------------------|---------------------------------------------------| | greater_bendigo.sqlite | Boundary of the Greater Bendigo Local Government Area | -| [to come] | Digital elevation model data for the Greater Bendigo area | +| dem_bendigo.tif | Digital elevation model data for the Greater Bendigo area | | [to come] | [NDVI] data for the Greater Bendigo area | | gtfs.zip | GTFS feed for Victoria as at 20 October 2023 | @@ -19,18 +19,22 @@ Download the following files for the relevant network location. | File | Description | |--------------------------|---------------------------------------------------| | greater_melbourne.sqlite | Boundary of the Greater Melbourne Greater Capital City Statistical Area | -| [to come] | Digital elevation model data for the Greater Melbourne area | +| dem_melbourne.tif | Digital elevation model data for the Greater Melbourne area | | [to come] | [NDVI] data for the Greater Melbourne area | | gtfs.zip | GTFS feed for Victoria as at 20 October 2023 | ## Other files -The directory also contains the following other files, from which the region boundary [and DEM] files above were created, using the code contained in `data/data prep tools.R`. 
That code may also be useful to generate similar data input files for other locations if required. +The directory also contains the following other files, from which the region boundary files above were created. -| File | Description | -|---------------------------------|--------------------------------------------| -| LGAs.zip | Local government areas of Victoria (VICMAP) | +| File | Description | +|---------------------------------|----------------------------------------------| +| LGAs.zip | Local government areas of Victoria (Vicmap) | | GCCSA_2021_AUST_SHP_GDA2020.zip | Greater capital city statistical areas (ABS) | -| [to come] | Digital elevation model data for Victoria | + +The file `data/data prep tools.R` contains: +* the script used to extract the region boundary files from the LGA and GCCSA files above, and +* the script used to crop the digital elevation files from a DEM file for the whole of Victoria (available for download from https://discover.data.vic.gov.au/dataset/vicmap-elevation-dem-10m, 9.3 GB). +Those scripts may also be useful to generate similar data input files for other locations if required. diff --git a/data/data prep tools.R b/data/data prep tools.R index 67dcc84..1b8d837 100644 --- a/data/data prep tools.R +++ b/data/data prep tools.R @@ -50,4 +50,36 @@ getRegion(input.file = read_zipped_GIS(zipfile = "./data/GCCSA_2021_AUST_SHP_GDA # 2 Elevation from whole of state file ---- # -----------------------------------------------------------------------------# -# [to come] +# Download 10m DEM for Victoria from https://discover.data.vic.gov.au/dataset/vicmap-elevation-dem-10m, +# and manually unzip it into the data folder - about 9.3GB (programmatic unzipping +# may not work for such a large file) + +# function for extracting region's elevation from whole of state file +getRegionDem <- function(dem.location, region.location, regionBufferDist, + output.filename, outputCrs) { + + # read in the raster, and convert to outputCrs if necessary + dem <- rast(dem.location) + if (!same.crs(dem, outputCrs)) dem <- project(dem, outputCrs) + + # region, buffered to selected distance (eg 10km) + region <- st_read(region.location) %>% st_buffer(regionBufferDist) + + # crop the DEM to the region + dem.cropped <- terra::crop(x = dem, y = region %>% st_buffer(1)) + + # write output + writeRaster(dem.cropped, paste0("./data/", output.filename, ".tif"), + gdal = "COMPRESS = DEFLATE", overwrite = TRUE) +} + +# Bendigo +getRegionDem(dem.location = "./data/vmelev_dem10m_ESRI_grid_GDA94_VicGrid/vmelev_dem10m_ESRI_grid_GDA94_Vicgrid/vmelev_dem10m/dem10m/hdr.adf", + region.location = "./data/greater_bendigo.sqlite", regionBufferDist = 10000, + output.filename = "dem_bendigo", outputCrs = "EPSG:7899") + +# Melbourne +getRegionDem(dem.location = "./data/vmelev_dem10m_ESRI_grid_GDA94_VicGrid/vmelev_dem10m_ESRI_grid_GDA94_Vicgrid/vmelev_dem10m/dem10m/hdr.adf", + region.location = "./data/greater_melbourne.sqlite", regionBufferDist = 10000, + output.filename = "dem_melbourne", outputCrs = "EPSG:7899") + From 46f639ae48bd2be05b44f47b27ee2284f2587b3c Mon Sep 17 00:00:00 2001 From: StevePem Date: Fri, 9 Feb 2024 17:33:34 +1100 Subject: [PATCH 070/103] update destinations - pseudo entry nodes for parks and schools; separate nodes for each mode --- functions/addDestinations.R | 243 +++++++++++++++++++++++++++++------- functions/getPTStops.R | 6 +- functions/writeOutputs.R | 8 +- 3 files changed, 208 insertions(+), 49 deletions(-) diff --git a/functions/addDestinations.R b/functions/addDestinations.R 
index 40bd573..79f4a93 100644 --- a/functions/addDestinations.R +++ b/functions/addDestinations.R @@ -81,6 +81,7 @@ addDestinations <- function(nodes_current, # create tables for points and polygons, allocate unique id's (so features # multiple multiple nodes can be grouped by the id where required), # and store area and location details + echo("Destination point features\n") destination.pt <- bind_rows(destination.layer(points), @@ -93,6 +94,7 @@ addDestinations <- function(nodes_current, centroid_x = st_coordinates(.)[, 1], centroid_y = st_coordinates(.)[, 2]) + echo("Destination polygon features\n") destination.poly <- destination.layer(polygons) %>% filter(st_is_valid(geom)) %>% @@ -117,70 +119,219 @@ addDestinations <- function(nodes_current, # find relevant nodes ---- # For all destinations except parks and schools ('small features'), relevant # node is nearest node to point or to polygon centroid + # For parks and schools ('large features'): # - points are buffered to 50m to create a polygon feature, # - for buffered points and polygons, relevant nodes are all nodes within the # feature and terminal nodes of links within 30m of boundary, or if none, # then nearest node to boundary - # In each case, nodes/links must be cyclable - cyclable.links <- edges_current %>% - filter(is_cycle == 1) - cyclable.nodes <- nodes_current %>% - filter(id %in% cyclable.links$from_id | id %in% cyclable.links$to_id) + # Find nodes available for each mode + links.cycle <- edges_current %>% filter(is_cycle == 1) + nodes.cycle <- nodes_current %>% filter(id %in% links.cycle$from_id | id %in% links.cycle$to_id) - # 'small' destinations - dest.small <- bind_rows(destination.pt, - destination.poly %>% st_centroid()) %>% - filter(!(dest_type %in% c("park", "primary_school", "secondary_school"))) - near_node <- cyclable.nodes$id[st_nearest_feature(dest.small, cyclable.nodes)] - dest.small.with.nodes <- cbind(dest.small %>% st_drop_geometry(), near_node) + links.walk <- edges_current %>% filter(is_walk == 1) + nodes.walk <- nodes_current %>% filter(id %in% links.walk$from_id | id %in% links.walk$to_id) + links.car <- edges_current %>% filter(is_car == 1) + nodes.car <- nodes_current %>% filter(id %in% links.car$from_id | id %in% links.car$to_id) + + # 'small' features + dest.small <- bind_rows(destination.pt, destination.poly) %>% + filter(!(dest_type %in% c("park", "primary_school", "secondary_school"))) + # nearest node of each mode + node_cycle <- nodes.cycle$id[st_nearest_feature(dest.small %>% st_centroid(), nodes.cycle)] + node_walk <- nodes.walk$id[st_nearest_feature(dest.small %>% st_centroid(), nodes.walk)] + node_car <- nodes.car$id[st_nearest_feature(dest.small %>% st_centroid(), nodes.car)] + # join nearest nodes to features + dest.small.with.nodes <- cbind(dest.small, node_cycle, node_walk, node_car) - # 'large' destinations - dest.large <- bind_rows(destination.pt %>% st_buffer(50), - destination.poly) %>% + # 'large' features - points + dest.large.pt <- destination.pt %>% + filter(dest_type %in% c("park", "primary_school", "secondary_school")) + # nearest node of each mode + node_cycle <- nodes.cycle$id[st_nearest_feature(dest.large.pt %>% st_centroid(), nodes.cycle)] + node_walk <- nodes.walk$id[st_nearest_feature(dest.large.pt %>% st_centroid(), nodes.walk)] + node_car <- nodes.car$id[st_nearest_feature(dest.large.pt %>% st_centroid(), nodes.car)] + # join nearest nodes to features + dest.large.pt.with.nodes <- cbind(dest.large.pt, node_cycle, node_walk, node_car) + + # 'large' features - 
polygons + dest.large.poly <- destination.poly %>% filter(dest_type %in% c("park", "primary_school", "secondary_school")) - # # - nodes within the feature - # dest.large.nodes.within <- dest.large %>% - # st_intersection(., cyclable.nodes %>% dplyr::select(near_node = id)) %>% - # st_drop_geometry() - - # - terminal nodes of links within feature buffered to 30m (will include any - # nodes within feature itself, as their links will fall within the buffered - # feature) - dest.large.found.nodes <- dest.large %>% - st_buffer(30) %>% - st_intersection(., cyclable.links %>% dplyr::select(from_id, to_id)) %>% + echo(paste("Finding entry nodes for", nrow(dest.large.poly), "parks and schools\n")) + + # internal nodes + internal.nodes.cycle <- nodes.cycle %>% + st_intersection(dest.large.poly) %>% st_drop_geometry() %>% - pivot_longer(cols = c("from_id", "to_id"), - names_to = NULL, - values_to = "near_node") %>% - distinct() - - # - nearest node if none within and no links within 30m - dest.large.other <- dest.large %>% - filter(!(dest_id %in% dest.large.found.nodes$dest_id)) - near_node <- cyclable.nodes$id[st_nearest_feature(dest.large.other, cyclable.nodes)] - dest.large.other.nodes <- cbind(dest.large.other %>% st_drop_geometry(), near_node) + dplyr::select(id, dest_id) + internal.nodes.walk <- nodes.walk %>% + st_intersection(dest.large.poly) %>% + st_drop_geometry() %>% + dplyr::select(id, dest_id) + internal.nodes.car <- nodes.car %>% + st_intersection(dest.large.poly) %>% + st_drop_geometry() %>% + dplyr::select(id, dest_id) + + # pseudo entry points + # buffered links + links.cycle.buffered <- st_buffer(links.cycle, 30) + links.walk.buffered <- st_buffer(links.walk, 30) + links.car.buffered <- st_buffer(links.car, 30) + + # setup for parallel processing and progress reporting + cores <- detectCores() + cluster <- parallel::makeCluster(cores) + doSNOW::registerDoSNOW(cluster) + pb <- txtProgressBar(max = nrow(dest.large.poly), style = 3) + progress <- function(n) setTxtProgressBar(pb, n) + opts <- list(progress = progress) + + # report + echo(paste("Finding boundary points for", nrow(dest.large.poly), + "parks and schools; parallel processing with", cores, "cores\n")) + + # loop to find list of boundary points + boundary.points <- + foreach(i = 1:nrow(dest.large.poly), + # foreach(i = 1:8, + # .combine = rbind, + .packages = c("dplyr", "sf"), + .options.snow = opts) %dopar% { + + dest <- dest.large.poly[i,] + + dest.boundary.points <- dest %>% + # convert destination polygons boundaries to linestring + st_cast(to = "MULTILINESTRING") %>% + st_cast(to = "LINESTRING") %>% + # locate points at 20m along boundaries + st_line_sample(., density = units::set_units(20, m)) + + return(dest.boundary.points) + } + + # convert the list of boundary points into a dataframe + # extract coordinates and ids (row numbers, corresponding to dest.large.poly rows) + coordinates <- lapply(boundary.points, function(x) st_coordinates(x[[1]])) + ids <- seq_along(boundary.points) + # make dataframe of ids and x and y coordinates + boundary.df <- data.frame( + id = rep(ids, sapply(coordinates, nrow)), + x = unlist(lapply(coordinates, function(x) x[,1])), + y = unlist(lapply(coordinates, function(x) x[,2]))) + # convert to sf object + boundary.sf <- st_as_sf(boundary.df, coords = c("x", "y")) %>% + st_set_crs(outputCrs) + + # pseudo entry points with nodes + pseudo.entry.nodes.cycle <- boundary.sf %>% + # keep only those within 30m of cyclable links + st_filter(links.cycle.buffered, .predicate = st_intersects) %>% + # 
nearest nodes to the entry points + mutate(node_cycle = nodes.cycle$id[st_nearest_feature(., nodes.cycle)]) %>% + st_drop_geometry() + + pseudo.entry.nodes.walk <- boundary.sf %>% + # keep only those within 30m of walkable links + st_filter(links.walk.buffered, .predicate = st_intersects) %>% + # nearest nodes to the entry points + mutate(node_walk = nodes.walk$id[st_nearest_feature(., nodes.walk)]) %>% + st_drop_geometry() + + pseudo.entry.nodes.car <- boundary.sf %>% + # keep only those within 30m of driveable links + st_filter(links.car.buffered, .predicate = st_intersects) %>% + # nearest nodes to the entry points + mutate(node_car = nodes.car$id[st_nearest_feature(., nodes.car)]) %>% + st_drop_geometry() + + # loop through large destination polygons, find their internal and pseudo entry + # nodes (and fallback nearest node), and combine + echo(paste("Assembling entry points for", nrow(dest.large.poly), + "parks and schools; parallel processing with", cores, "cores\n")) + + # loop to assemble entry nodes + dest.large.poly.with.nodes <- + foreach(i = 1:nrow(dest.large.poly), + # foreach(i = 1:8, + .combine = rbind, + .packages = c("dplyr"), + .options.snow = opts) %dopar% { + + dest <- dest.large.poly[i,] + + # nodes within features (identifier is 'dest_id') + dest.internal.nodes.cycle <- internal.nodes.cycle %>% + filter(dest_id == dest$dest_id) %>% + .$id + dest.internal.nodes.walk <- internal.nodes.walk %>% + filter(dest_id == dest$dest_id) %>% + .$id + dest.internal.nodes.car <- internal.nodes.car %>% + filter(dest_id == dest$dest_id) %>% + .$id + + # pseudo entry nodes (identifier is 'id', which is row number) + dest.pseudo.entry.nodes.cycle <- pseudo.entry.nodes.cycle %>% + filter(id == i) %>% + .$node_cycle + dest.pseudo.entry.nodes.walk <- pseudo.entry.nodes.walk %>% + filter(id == i) %>% + .$node_walk + dest.pseudo.entry.nodes.car <- pseudo.entry.nodes.car %>% + filter(id == i) %>% + .$node_car + + # combined nodes + entry.nodes.cycle <- unique(c(dest.internal.nodes.cycle, dest.pseudo.entry.nodes.cycle)) + entry.nodes.walk <- unique(c(dest.internal.nodes.walk, dest.pseudo.entry.nodes.walk)) + entry.nodes.car <- unique(c(dest.internal.nodes.car, dest.pseudo.entry.nodes.car)) + + # fallback if no internal or pseudo entry nodes + if (length(entry.nodes.cycle) == 0) { + entry.nodes.cycle <- nodes.cycle$id[st_nearest_feature(dest, nodes.cycle)] + } + if (length(entry.nodes.walk) == 0) { + entry.nodes.walk <- nodes.walk$id[st_nearest_feature(dest, nodes.walk)] + } + if (length(entry.nodes.car) == 0) { + entry.nodes.car <- nodes.car$id[st_nearest_feature(dest, nodes.car)] + } + + # convert the entry nodes to strings, and add to the dest row + dest$node_cycle <- toString(entry.nodes.cycle) + dest$node_walk <- toString(entry.nodes.walk) + dest$node_car <- toString(entry.nodes.car) + + return(dest) + } - # combine the large destinations - dest.large.with.nodes <- bind_rows(dest.large.found.nodes, - dest.large.other.nodes) + # close the progress bar and cluster + close(pb) + stopCluster(cluster) # combine all destinations for output ---- - dest.with.nodes <- bind_rows(dest.small.with.nodes, - dest.large.with.nodes) %>% + dest.with.nodes <- bind_rows(dest.small.with.nodes %>% + mutate(node_cycle = as.character(node_cycle), + node_walk = as.character(node_walk), + node_car = as.character(node_car)), + dest.large.pt.with.nodes %>% + mutate(node_cycle = as.character(node_cycle), + node_walk = as.character(node_walk), + node_car = as.character(node_car)), + dest.large.poly.with.nodes) %>% 
relocate(dest_id) %>% relocate(dest_type, .after = dest_id) %>% - relocate(near_node, .after = dest_type) %>% - relocate(other_tags, .after = last_col()) %>% - - # and join nodes for locations - left_join(., nodes_current %>% dplyr::select(id), by = c("near_node" = "id")) - + relocate(node_cycle, .after = dest_type) %>% + relocate(node_walk, .after = node_cycle) %>% + relocate(node_car, .after = node_walk) %>% + relocate(other_tags, .after = last_col()) + return(dest.with.nodes) } diff --git a/functions/getPTStops.R b/functions/getPTStops.R index a9f6b0a..9a57a32 100644 --- a/functions/getPTStops.R +++ b/functions/getPTStops.R @@ -9,6 +9,8 @@ getPTStops <- function(city, gtfs_feed, outputCrs, region, regionBufferDist) { # region = "../data/processed/greater_melbourne.sqlite" # regionBufferDist = 10000 + echo("Reading in GTFS data to find public transport stop locations\n") + # read in GTFS feed gtfs <- read_gtfs(gtfs_feed) %>% gtfs_as_sf(., crs = 4326) @@ -41,7 +43,7 @@ getPTStops <- function(city, gtfs_feed, outputCrs, region, regionBufferDist) { } else if (!all(route_types %in% c("0", "1", "2", "3", "4", "5", "6", "7", "11", "12"))) { message("GTFS Feed contains the following route type codes: ", paste(route_types, collapse = ", "), ". Unable to process these using the standard route codes from https://developers.google.com/transit/gtfs/reference, which are: - 0-tram, 1-metro, 2-train, 3-bus, 4-ferry, 5-cable tram, 6-cable car, 7-funicular, 11-trolleybus, 12 monorail. + 0-tram, 1-metro, 2-train, 3-bus, 4-ferry, 5-cable tram, 6-cable car, 7-funicular, 11-trolleybus, 12-monorail. Edit getPTStops.R to specify the meanings of the codes used in the GTFS Feed. PT stops will not be included in destinations.") stops.found = FALSE @@ -49,7 +51,7 @@ PT stops will not be included in destinations.") } else { message("GTFS Feed contains the following route type codes: ", paste(route_types, collapse = ", "), ". Using standard route_type codes from https://developers.google.com/transit/gtfs/reference: - 0-tram, 1-metro, 2-train, 3-bus, 4-ferry, 5-cable tram, 6-cable car, 7-funicular, 11-trolleybus, 12 monorail. + 0-tram, 1-metro, 2-train, 3-bus, 4-ferry, 5-cable tram, 6-cable car, 7-funicular, 11-trolleybus, 12-monorail. 
Adjust 'getPTStops' function if these don't match the codes used in your GTFS feed.") stops.routetypes.coded <- stops.routetypes %>% mutate(pt_stop_type = case_when( diff --git a/functions/writeOutputs.R b/functions/writeOutputs.R index e6ecf40..60165cd 100644 --- a/functions/writeOutputs.R +++ b/functions/writeOutputs.R @@ -70,7 +70,13 @@ exportShp <- function(networkFinal, outputDir, outputCrs){ driver = "ESRI Shapefile", layer_options = 'GEOMETRY=AS_XY', delete_layer = T) if (length(networkFinal) > 2) { - st_write(networkFinal[[3]], paste0(shpDir,'/destinations.shp'), + message("When writing destinations to shapefile, long 'other_tags' may be truncated; consider using sqlite instead.") + dest.pt <- st_collection_extract(networkFinal[[3]], type = "POINT") + dest.poly <- st_collection_extract(networkFinal[[3]], type = "POLYGON") + st_write(dest.pt, paste0(shpDir,'/destinations_point.shp'), + driver = "ESRI Shapefile", layer_options = 'GEOMETRY=AS_XY', + delete_layer = T) + st_write(dest.poly, paste0(shpDir,'/destinations_polygon.shp'), driver = "ESRI Shapefile", layer_options = 'GEOMETRY=AS_XY', delete_layer = T) } From ff57489fc2529803e37561fafedc48335cb6f13d Mon Sep 17 00:00:00 2001 From: StevePem Date: Mon, 12 Feb 2024 12:18:10 +1100 Subject: [PATCH 071/103] convert OSM 'lanes' tags for whole road to 'permlanes' in each direction --- functions/processOsmTags.R | 19 +++++++++---------- 1 file changed, 9 insertions(+), 10 deletions(-) diff --git a/functions/processOsmTags.R b/functions/processOsmTags.R index 7ae99d2..f3aca54 100644 --- a/functions/processOsmTags.R +++ b/functions/processOsmTags.R @@ -62,16 +62,6 @@ processOsmTags <- function(osm_df,this_defaults_df){ df$freespeed[1]=freeSpeed } } - if("lanes" %in% keys) { - newLanes=as.integer(values[which(keys=="lanes")]) - # some osm tags set the number of lanes to zero - # added is.na since one of the lanes has a value of "2; 3" - if(!is.na(newLanes) & newLanes > 0) { - # Lane capacity is per lane and should not be adjusted based on number of lanes - # df$laneCapacity[1]= df$laneCapacity[1] * (newLanes/df$permlanes[1]) - df$permlanes[1]=newLanes - } - } df$surface[1]=surface_tags if(any(oneway_tags=="yes")) df$is_oneway[1]=1 @@ -86,6 +76,15 @@ processOsmTags <- function(osm_df,this_defaults_df){ if(any(foot_tags %in% c("yes","designated"))) df$is_walk[1]=1 if(df$cycleway[1]>0 | any(bicycle_tags %in% c("yes","designated"))) df$is_cycle[1]=1 if(any(bicycle_tags %in% "no")) df$is_cycle[1]=0 + + if ("lanes" %in% keys) { + taggedLanes = as.integer(values[which(keys == "lanes")]) + # lanes is number of tagged lanes if one-way, or divide by 2 (rounded up) if two-way + newLanes = ifelse(df$is_oneway[1] == 1, taggedLanes, ceiling(taggedLanes / 2)) + if (!is.na(newLanes) & newLanes > 0) { + df$permlanes[1] = newLanes + } + } } return(df) } From 97de077d4973d36917ee968783ee1fb681e3d46a Mon Sep 17 00:00:00 2001 From: Alan Date: Mon, 12 Feb 2024 17:58:33 +1100 Subject: [PATCH 072/103] Minor change to allow osm_id to flow through to output if present --- functions/restructureData.R | 2 +- processOSM.sh | 16 ---------------- 2 files changed, 1 insertion(+), 17 deletions(-) diff --git a/functions/restructureData.R b/functions/restructureData.R index 9ec0668..827bca7 100644 --- a/functions/restructureData.R +++ b/functions/restructureData.R @@ -65,7 +65,7 @@ restructureData <- function(networkList, highway_lookup, mutate(cycleway=ifelse(cycleway==2, "simple_lane" , cycleway)) %>% mutate(cycleway=ifelse(cycleway==1, "shared_street" , cycleway)) %>% 
mutate(cycleway=ifelse(cycleway==0, NA , cycleway)) %>% - dplyr::select(any_of(c("from_id", "to_id", "fromX", "fromY", "toX", "toY", + dplyr::select(any_of(c("osm_id","from_id", "to_id", "fromX", "fromY", "toX", "toY", "length", "freespeed", "permlanes", "capacity", "highway", "is_oneway", "cycleway", "surface", "is_cycle", "is_walk", "is_car", "modes", "ndvi"))) %>% diff --git a/processOSM.sh b/processOSM.sh index b5f0f4c..366a7c8 100644 --- a/processOSM.sh +++ b/processOSM.sh @@ -47,22 +47,6 @@ ogr2ogr -update -overwrite -nln roads_points -f "SQLite" -dsco SPATIALITE=YES \ WHERE highway LIKE '%traffic_signals%' " \ ./data/temp.sqlite $extract -# extract the train and tram lines and add to temp.sqlite -# apparently there are miniature railways -ogr2ogr -update -overwrite -nln pt -f "SQLite" -dialect SQLite -sql \ - "SELECT CAST(osm_id AS DOUBLE PRECISION) AS osm_id, highway, other_tags, \ - GEOMETRY FROM lines \ - WHERE other_tags LIKE '%railway%' AND \ - other_tags NOT LIKE '%busbar%' AND \ - other_tags NOT LIKE '%abandoned%' AND \ - other_tags NOT LIKE '%miniature%' AND \ - other_tags NOT LIKE '%proposed%' AND \ - other_tags NOT LIKE '%disused%' AND \ - other_tags NOT LIKE '%preserved%' AND \ - other_tags NOT LIKE '%construction%' AND \ - other_tags NOT LIKE '%\"service\"=>\"yard\"%'" \ - ./data/temp.sqlite $extract - # the postgres database name. DB_NAME="network_test" From cdf6d155fa1bddbd72662bb33e6ac7e2be192c29 Mon Sep 17 00:00:00 2001 From: StevePem Date: Wed, 14 Feb 2024 08:31:34 +1100 Subject: [PATCH 073/103] generalise buffer distance - used for several purposes --- NetworkGenerator.R | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/NetworkGenerator.R b/NetworkGenerator.R index e74518f..1c55960 100644 --- a/NetworkGenerator.R +++ b/NetworkGenerator.R @@ -51,11 +51,14 @@ makeNetwork<-function(city, outputSubdirectory = "generated_network"){ return() } + # REGION BUFFER DISTANCE + # Distance to buffer region when getting osm extract, destinations or gtfs routes + regionBufferDist=10000 + # DOWNLOAD OSM EXTRACT # A flag for whether to download osm extract for the region (if not, and if # network needs to be processed, then must already have osmGpkg file) downloadOsm=T - regionBufferDist=10000 # Distance to buffer region when getting osm extract retainDownload=F # Whether to retain downloaded file after region extracted # NETWORK FROM OSM From f8a05b8722ab020b82224aaac305cd8434346cc6 Mon Sep 17 00:00:00 2001 From: StevePem Date: Wed, 14 Feb 2024 08:32:20 +1100 Subject: [PATCH 074/103] update GTFS analysis dates --- NetworkGenerator.R | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/NetworkGenerator.R b/NetworkGenerator.R index 1c55960..5959fbf 100644 --- a/NetworkGenerator.R +++ b/NetworkGenerator.R @@ -97,9 +97,10 @@ makeNetwork<-function(city, outputSubdirectory = "generated_network"){ ndviBuffDist=30 # GTFS - addGtfs=F - analysis_start = as.Date("2019-10-11","%Y-%m-%d") # Transit Feed start date - analysis_end = as.Date("2019-10-17","%Y-%m-%d") # Transit Feed end date + addGtfs=T + # analysis period is a week [why? gtfs2PtNetwork.R just takes a Wednesday anyway] + analysis_start = as.Date("2023-11-13","%Y-%m-%d") # analysis period start date + analysis_end = as.Date("2023-11-19","%Y-%m-%d") # analysis end date # Outputs # outputSubdirectory=format(Sys.time(),"%d%b%y_%H%M") # date_hour, eg. 
"17Aug21_1308" From dd8b7a8214b1d9f53c516f19a731b7b1269f6e1e Mon Sep 17 00:00:00 2001 From: StevePem Date: Wed, 14 Feb 2024 08:34:43 +1100 Subject: [PATCH 075/103] GTFS using region file, buffered to 'regionBufferDist' --- NetworkGenerator.R | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/NetworkGenerator.R b/NetworkGenerator.R index 5959fbf..fe670ee 100644 --- a/NetworkGenerator.R +++ b/NetworkGenerator.R @@ -327,14 +327,14 @@ makeNetwork<-function(city, outputSubdirectory = "generated_network"){ # Adjust your analysis start date, end data and gtfs feed name above if(addGtfs) { # Adjust these parameters based on your GTFS file - if(file.exists("data/studyRegion.sqlite")){ + if(file.exists(region)){ # read in the study region boundary - echo("Using Study Region file for GTFS processing") - studyRegion <- st_read("data/studyRegion.sqlite",quiet=T) %>% - st_buffer(10000) %>% + echo("Using Region file for GTFS processing") + studyRegion <- st_read(region, quiet=T) %>% + st_buffer(regionBufferDist) %>% st_snap_to_grid(1) }else{ - echo("Study Region file was not found, skipping") + echo("Region file was not found, skipping") studyRegion = NA } system.time( From ef388951c935adb38ff86ce93f3edf53025e8e46 Mon Sep 17 00:00:00 2001 From: StevePem Date: Wed, 14 Feb 2024 08:47:01 +1100 Subject: [PATCH 076/103] update locations and other tidying --- functions/gtfs2PtNetwork.R | 54 +++++++++++++++++++------------------- 1 file changed, 27 insertions(+), 27 deletions(-) diff --git a/functions/gtfs2PtNetwork.R b/functions/gtfs2PtNetwork.R index 23be99c..fa80a53 100644 --- a/functions/gtfs2PtNetwork.R +++ b/functions/gtfs2PtNetwork.R @@ -1,18 +1,18 @@ -addGtfsLinks <- function(outputLocation="./test/", +addGtfsLinks <- function(outputLocation, nodes, links, - gtfs_feed = "data/gtfs_au_vic_ptv_20191004.zip", - analysis_start = as.Date("2019-10-11","%Y-%m-%d"), - analysis_end = as.Date("2019-10-17","%Y-%m-%d"), + gtfs_feed, + analysis_start, + analysis_end, studyRegion=NA, - outputCrs=outputCrs){ - # outputLocation="./gtfs/" + outputCrs){ + # outputLocation="./output/generated_network/gtfs/" # nodes=networkRestructured[[1]] # links=networkRestructured[[2]] - # gtfs_feed = "data/gtfs_au_vic_ptv_20191004.zip" - # analysis_start = as.Date("2019-10-11","%Y-%m-%d") - # analysis_end = as.Date("2019-10-17","%Y-%m-%d") - # studyRegion=greaterMelbourne + # gtfs_feed = "./data/gtfs.zip" + # analysis_start = as.Date("2023-11-13","%Y-%m-%d") + # analysis_end = as.Date("2023-11-19","%Y-%m-%d") + # studyRegion=st_read(region) validRoadEdges <- links %>% st_drop_geometry() %>% @@ -25,10 +25,13 @@ addGtfsLinks <- function(outputLocation="./test/", st_set_crs(outputCrs) # process the GTFS feed and export relevant tables into a folder - processGtfs(outputLocation = outputLocation, + processGtfs(outputLocation, networkNodes = validRoadNodes, - studyRegion = studyRegion, - outputCrs = outputCrs) + gtfs_feed, + analysis_start, + analysis_end, + studyRegion, + outputCrs) # read the outputs stops <- st_read(paste0(outputLocation,"stops.sqlite"),quiet=T) stopTimes <- readRDS(paste0(outputLocation,"stopTimes.rds")) @@ -37,9 +40,7 @@ addGtfsLinks <- function(outputLocation="./test/", stopTable <- readRDS(paste0(outputLocation, "stopTable.rds")) # We run into trouble if the geometry column is 'geom' instead of 'GEOMETRY' - if('GEOMETRY'%in%colnames(stops)) { - stops<-stops%>%rename(geom=GEOMETRY) - } + stops <- stops %>% st_set_geometry("geom") # return the edges in the PT network as well as write the # 
transitVehicles.xml and transitSchedule.xml files @@ -58,16 +59,16 @@ addGtfsLinks <- function(outputLocation="./test/", processGtfs <- function(outputLocation="./test/", networkNodes, - gtfs_feed = "data/gtfs_au_vic_ptv_20191004.zip", - analysis_start = as.Date("2019-10-11","%Y-%m-%d"), - analysis_end = as.Date("2019-10-17","%Y-%m-%d"), + gtfs_feed, + analysis_start, + analysis_end, studyRegion=NA, outputCrs){ - # outputLocation="./gtfs/" + # outputLocation="./output/generated_network/gtfs/" # networkNodes = validRoadNodes - # gtfs_feed = "data/gtfs_au_vic_ptv_20191004.zip" - # analysis_start = as.Date("2019-10-11","%Y-%m-%d") - # analysis_end = as.Date("2019-10-17","%Y-%m-%d") + # gtfs_feed = "./data/gtfs.zip" + # analysis_start = as.Date("2023-11-13","%Y-%m-%d") + # analysis_end = as.Date("2023-11-19","%Y-%m-%d") #dir.create(outputLocation, showWarnings = FALSE) @@ -89,6 +90,7 @@ processGtfs <- function(outputLocation="./test/", mutate(service_type="null", service_type=ifelse(agency_id%in%c(3) & route_type%in%c(0), "tram" ,service_type), service_type=ifelse(agency_id%in%c(1,2) & route_type%in%c(1,2),"train",service_type), + # CONSIDER THE LINE BELOW - omitting 5, which is VLINE BUS service_type=ifelse(agency_id%in%c(4,6) & route_type%in%c(3), "bus" ,service_type)) %>% filter(service_type!="null") %>% mutate(route_id=as.factor(route_id)) %>% @@ -359,7 +361,7 @@ exportGtfsSchedule <- function(links, departureTime=departure_time,vehicleRefId,type) %>% as.data.frame() - # Types of vehicles to place in the network + # Types of vehicles to place in the network - TO BE REVIEWED vehicleTypes <- tribble( ~id, ~service_type, ~seats, ~standingRoom, ~length, ~accessTime, ~egressTime, ~passengerCarEquivalents, 1 , "train" , 114 , 206 , 150 , "0.0" , "0.0" , 0.25 , @@ -440,8 +442,7 @@ exportGtfsSchedule <- function(links, transitRoutes<-routeProfile$transitRouteId%>%unique()%>%sort() - # TODO Ask @Alan to check this part: - for (i in 1:length(transitRoutes)) { + for (i in 1:length(transitRoutes)) { # for (i in 1:100) { routeProfileCurrent <- routeProfile[routeProfile$transitRouteId==transitRoutes[i],] departuresCurrent <- departures[departures$transitRouteId==transitRoutes[i],] @@ -540,4 +541,3 @@ exportGtfsSchedule <- function(links, mutate(cycleway=as.character(cycleway)) return(edgesCombined) -} \ No newline at end of file From 586634240a1e49815eb9d4f7e73f37d2ed1e2e9c Mon Sep 17 00:00:00 2001 From: StevePem Date: Wed, 14 Feb 2024 08:48:15 +1100 Subject: [PATCH 077/103] allow for dates containing dashes --- functions/gtfs2PtNetwork.R | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/functions/gtfs2PtNetwork.R b/functions/gtfs2PtNetwork.R index fa80a53..8635bc0 100644 --- a/functions/gtfs2PtNetwork.R +++ b/functions/gtfs2PtNetwork.R @@ -74,6 +74,12 @@ processGtfs <- function(outputLocation="./test/", gtfs <- read_gtfs(gtfs_feed) + # if calendar uses integers for dates, convert start and end (eg 2023-11-13 to 20231113) + if (typeof(gtfs$calendar$start_date) == "integer") { + analysis_start <- as.integer(gsub("-", "", analysis_start)) + analysis_end <- as.integer(gsub("-", "", analysis_end)) + } + validCalendar <- gtfs$calendar %>% filter(start_date<=analysis_end & end_date>=analysis_start) %>% filter(wednesday==1) @@ -541,3 +547,4 @@ exportGtfsSchedule <- function(links, mutate(cycleway=as.character(cycleway)) return(edgesCombined) +} From 8a2b4e5e1e1949b8bfe34a509f4b4e6565206861 Mon Sep 17 00:00:00 2001 From: StevePem Date: Wed, 14 Feb 2024 08:49:08 +1100 Subject: [PATCH 078/103] retain 
departure/arrival times after midnight

---
 functions/gtfs2PtNetwork.R | 10 +++++++---
 1 file changed, 7 insertions(+), 3 deletions(-)

diff --git a/functions/gtfs2PtNetwork.R b/functions/gtfs2PtNetwork.R
index 8635bc0..d1a0679 100644
--- a/functions/gtfs2PtNetwork.R
+++ b/functions/gtfs2PtNetwork.R
@@ -113,10 +113,14 @@ processGtfs <- function(outputLocation="./test/",
       mutate(trip_id=as.factor(trip_id),
              stop_id=as.factor(stop_id)) %>%
       filter(trip_id %in% validTrips$trip_id) %>%
-      mutate(arrival_time=as.numeric(as_hms(arrival_time)),
-             departure_time=as.numeric(as_hms(departure_time))) %>%
+      # arrival and departure times as no. of seconds (keeping even those over 24 hours)
+      mutate(arrival_time = as.numeric(str_sub(arrival_time, 1, 2)) * 60 * 60 +
+               as.numeric(str_sub(arrival_time, 4, 5)) * 60 +
+               as.numeric(str_sub(arrival_time, 7, 8)),
+             departure_time = as.numeric(str_sub(departure_time, 1, 2)) * 60 * 60 +
+               as.numeric(str_sub(departure_time, 4, 5)) * 60 +
+               as.numeric(str_sub(departure_time, 7, 8))) %>%
       dplyr::select(trip_id,arrival_time,departure_time,stop_id,stop_sequence) %>%
-      filter(!is.na(arrival_time)) %>% # some of the schedule goes past 24 hours
       arrange(trip_id,stop_sequence)
   )
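To make the effect of the str_sub arithmetic above concrete, here is a minimal standalone sketch (hms_to_seconds is an illustrative helper, not part of the repository): a GTFS time string past midnight such as "25:10:00" now converts cleanly to seconds instead of being dropped; the patches that follow are unchanged.

library(stringr)

# illustrative helper mirroring the patch's str_sub arithmetic
hms_to_seconds <- function(x) {
  as.numeric(str_sub(x, 1, 2)) * 60 * 60 +  # hours
    as.numeric(str_sub(x, 4, 5)) * 60 +     # minutes
    as.numeric(str_sub(x, 7, 8))            # seconds
}

hms_to_seconds("23:59:00")  # 86340
hms_to_seconds("25:10:00")  # 90600 - an after-midnight time that the previous as_hms parsing lost as NA

From 6c5c4ba0f59d945074e3f2bb779a0c8d4c72a5fc Mon Sep 17 00:00:00 2001
From: StevePem
Date: Wed, 14 Feb 2024 09:28:02 +1100
Subject: [PATCH 079/103] extend routes to include regional coach (type 5)

---
 functions/gtfs2PtNetwork.R | 5 ++---
 1 file changed, 2 insertions(+), 3 deletions(-)

diff --git a/functions/gtfs2PtNetwork.R b/functions/gtfs2PtNetwork.R
index d1a0679..4908c07 100644
--- a/functions/gtfs2PtNetwork.R
+++ b/functions/gtfs2PtNetwork.R
@@ -96,9 +96,8 @@ processGtfs <- function(outputLocation="./test/",
     mutate(service_type="null",
            service_type=ifelse(agency_id%in%c(3) & route_type%in%c(0), "tram" ,service_type),
            service_type=ifelse(agency_id%in%c(1,2) & route_type%in%c(1,2),"train",service_type),
-           # CONSIDER THE LINE BELOW - omitting 5, which is VLINE BUS
-           service_type=ifelse(agency_id%in%c(4,6) & route_type%in%c(3), "bus" ,service_type)) %>%
-    filter(service_type!="null") %>%
+           service_type=ifelse(agency_id%in%c(4,5,6) & route_type%in%c(3), "bus" ,service_type)) %>%
+    filter(service_type!="null") %>% # eg skybus
     mutate(route_id=as.factor(route_id)) %>%
     mutate(service_type=as.factor(service_type)) %>%
     dplyr::select(route_id,service_type)

From 0942c19172e1fe262c834dce663813a94c923095 Mon Sep 17 00:00:00 2001
From: StevePem
Date: Wed, 14 Feb 2024 09:31:37 +1100
Subject: [PATCH 080/103] avoid error in testing whether studyRegion is NA

---
 functions/gtfs2PtNetwork.R | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/functions/gtfs2PtNetwork.R b/functions/gtfs2PtNetwork.R
index 4908c07..a117be5 100644
--- a/functions/gtfs2PtNetwork.R
+++ b/functions/gtfs2PtNetwork.R
@@ -134,7 +134,7 @@ processGtfs <- function(outputLocation="./test/",
     st_snap_to_grid(1)

   # only want stops within the study region
-  if(!is.na(studyRegion)){
+  if(!is.na(studyRegion) && length(studyRegion) > 0){
     message("Cropping to study region")
     validStops <- validStops %>%
       filter(lengths(st_intersects(., studyRegion)) > 0)

From 5b4c40d145d094976e2e9e3f86b7c18bbcd57135 Mon Sep 17 00:00:00 2001
From: StevePem
Date: Wed, 14 Feb 2024 09:57:25 +1100
Subject: [PATCH 081/103] retain trips with minimum of 2 (rather than 3) stops

---
 functions/gtfs2PtNetwork.R | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/functions/gtfs2PtNetwork.R b/functions/gtfs2PtNetwork.R
index a117be5..fd45277 100644
--- 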
a/functions/gtfs2PtNetwork.R +++ b/functions/gtfs2PtNetwork.R @@ -167,8 +167,8 @@ processGtfs <- function(outputLocation="./test/", inner_join(st_drop_geometry(validStopsSnapped),by="stop_id") %>% # IMPORTANT: this join also removes the stops outside of the region! arrange(trip_id,stop_sequence) %>% group_by(trip_id) %>% - # we want at least 3 stops for each trip - filter(n()>=3) %>% + # we want at least 2 stops for each trip + filter(n() >= 2) %>% # when we use the snapped locations, two sequential stops may be at the same # location. If this is the case, we remove the later stop. # mutate(bad=ifelse(id==lag(id) & row_number()>1,T,F)) %>% From faf00a6d6809284db62eaf863cbcf7aeea40785c Mon Sep 17 00:00:00 2001 From: StevePem Date: Wed, 14 Feb 2024 10:12:09 +1100 Subject: [PATCH 082/103] tidying echo statements --- NetworkGenerator.R | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/NetworkGenerator.R b/NetworkGenerator.R index fe670ee..621dedf 100644 --- a/NetworkGenerator.R +++ b/NetworkGenerator.R @@ -329,12 +329,12 @@ makeNetwork<-function(city, outputSubdirectory = "generated_network"){ # Adjust these parameters based on your GTFS file if(file.exists(region)){ # read in the study region boundary - echo("Using Region file for GTFS processing") + echo("Using Region file for GTFS processing\n") studyRegion <- st_read(region, quiet=T) %>% st_buffer(regionBufferDist) %>% st_snap_to_grid(1) }else{ - echo("Region file was not found, skipping") + echo("Region file was not found, skipping\n") studyRegion = NA } system.time( From 3639f945a5bb4a5066b6690cf1e0bfb1591097a3 Mon Sep 17 00:00:00 2001 From: StevePem Date: Wed, 14 Feb 2024 10:38:00 +1100 Subject: [PATCH 083/103] tidy default parameters --- functions/gtfs2PtNetwork.R | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/functions/gtfs2PtNetwork.R b/functions/gtfs2PtNetwork.R index fd45277..2813e11 100644 --- a/functions/gtfs2PtNetwork.R +++ b/functions/gtfs2PtNetwork.R @@ -12,7 +12,7 @@ addGtfsLinks <- function(outputLocation, # gtfs_feed = "./data/gtfs.zip" # analysis_start = as.Date("2023-11-13","%Y-%m-%d") # analysis_end = as.Date("2023-11-19","%Y-%m-%d") - # studyRegion=st_read(region) + # studyRegion=st_read(region, quiet=T) %>% st_buffer(regionBufferDist) %>% st_snap_to_grid(1) validRoadEdges <- links %>% st_drop_geometry() %>% From be5a02b5cb9fa1c3b9ffd26ac3a147d4812cca38 Mon Sep 17 00:00:00 2001 From: StevePem Date: Tue, 20 Feb 2024 15:33:30 +1100 Subject: [PATCH 084/103] update gtfs for onroad bus running; single analysis date rather than wednesday in given week(s) --- NetworkGenerator.R | 46 +-- functions/gtfs2PtNetwork.R | 612 ++++++++++++++++++++++++++++++------- 2 files changed, 534 insertions(+), 124 deletions(-) diff --git a/NetworkGenerator.R b/NetworkGenerator.R index 621dedf..90cca20 100644 --- a/NetworkGenerator.R +++ b/NetworkGenerator.R @@ -97,10 +97,11 @@ makeNetwork<-function(city, outputSubdirectory = "generated_network"){ ndviBuffDist=30 # GTFS + # A flag for whether to add a network based on GTFS or not addGtfs=T - # analysis period is a week [why? 
gtfs2PtNetwork.R just takes a Wednesday anyway] - analysis_start = as.Date("2023-11-13","%Y-%m-%d") # analysis period start date - analysis_end = as.Date("2023-11-19","%Y-%m-%d") # analysis end date + # Select an analysis date, eg a midweek day that's not a public or school holiday + analysis_date=as.Date("2023-11-15","%Y-%m-%d") + onroadBus=T # whether to route buses on roads (rather than create separate pseudo links) # Outputs # outputSubdirectory=format(Sys.time(),"%d%b%y_%H%M") # date_hour, eg. "17Aug21_1308" @@ -129,7 +130,7 @@ makeNetwork<-function(city, outputSubdirectory = "generated_network"){ library(parallel) library(foreach) library(nngeo) - + library(igraph) # Building the output folder structure ------------------------------------ outputDir <- paste0("output/",outputSubdirectory) @@ -323,37 +324,38 @@ makeNetwork<-function(city, outputSubdirectory = "generated_network"){ networkRestructured[[2]] <- addElevation2Links(networkRestructured) } + # Make network oneway (required because cycling impedances such as level of + # traffic stress and slope may be different in each direction) + echo("Making all links one way\n") + networkOneway <- makeEdgesOneway(networkRestructured[[1]], + networkRestructured[[2]]) + # Adding PT pseudo-network based on GTFS - # Adjust your analysis start date, end data and gtfs feed name above - if(addGtfs) { + # Adjust your analysis date and gtfs feed name above + if (addGtfs) { # Adjust these parameters based on your GTFS file - if(file.exists(region)){ + if (file.exists(region)) { # read in the study region boundary echo("Using Region file for GTFS processing\n") studyRegion <- st_read(region, quiet=T) %>% st_buffer(regionBufferDist) %>% st_snap_to_grid(1) - }else{ + } else { echo("Region file was not found, skipping\n") studyRegion = NA } system.time( - networkRestructured[[2]] <- addGtfsLinks(outputLocation=paste0(outputDir,"/gtfs/"), - nodes=networkRestructured[[1]], - links=networkRestructured[[2]], - gtfs_feed=gtfs_feed, - analysis_start= analysis_start, - analysis_end=analysis_end, - studyRegion=studyRegion, - outputCrs=outputCrs)) + networkOneway[[2]] <- addGtfsLinks(outputLocation = paste0(outputDir,"/gtfs/"), + nodes = networkOneway[[1]], + links = networkOneway[[2]], + gtfs_feed, + analysis_date, + studyRegion, + outputCrs, + onroadBus, + city)) } - # Make network oneway (required because cycling impedances such as level of - # traffic stress and slope may be different in each direction) - echo("Making all links one way\n") - networkOneway <- makeEdgesOneway(networkRestructured[[1]], - networkRestructured[[2]]) - networkFinal <- networkOneway if (addDestinationLayer) { diff --git a/functions/gtfs2PtNetwork.R b/functions/gtfs2PtNetwork.R index 2813e11..d3afdc6 100644 --- a/functions/gtfs2PtNetwork.R +++ b/functions/gtfs2PtNetwork.R @@ -2,17 +2,20 @@ addGtfsLinks <- function(outputLocation, nodes, links, gtfs_feed, - analysis_start, - analysis_end, + analysis_date, studyRegion=NA, - outputCrs){ - # outputLocation="./output/generated_network/gtfs/" - # nodes=networkRestructured[[1]] - # links=networkRestructured[[2]] + outputCrs, + onroadBus, + city){ + + # outputLocation = "./output/generated_network/gtfs/" + # nodes = networkOneway[[1]] + # links = networkOneway[[2]] # gtfs_feed = "./data/gtfs.zip" - # analysis_start = as.Date("2023-11-13","%Y-%m-%d") - # analysis_end = as.Date("2023-11-19","%Y-%m-%d") - # studyRegion=st_read(region, quiet=T) %>% st_buffer(regionBufferDist) %>% st_snap_to_grid(1) + # analysis_date = 
as.Date("2023-11-15","%Y-%m-%d") + # studyRegion = st_read(region, quiet=T) %>% st_buffer(regionBufferDist) %>% st_snap_to_grid(1) + # outputCrs = 7899 + # onroadBus = T validRoadEdges <- links %>% st_drop_geometry() %>% @@ -26,18 +29,26 @@ addGtfsLinks <- function(outputLocation, # process the GTFS feed and export relevant tables into a folder processGtfs(outputLocation, + nodes, + links, networkNodes = validRoadNodes, gtfs_feed, - analysis_start, - analysis_end, + analysis_date, studyRegion, - outputCrs) + outputCrs, + onroadBus) + # read the outputs stops <- st_read(paste0(outputLocation,"stops.sqlite"),quiet=T) stopTimes <- readRDS(paste0(outputLocation,"stopTimes.rds")) trips <- readRDS(paste0(outputLocation,"trips.rds")) routes <- readRDS(paste0(outputLocation,"routes.rds")) stopTable <- readRDS(paste0(outputLocation, "stopTable.rds")) + if (file.exists(paste0(outputLocation, "shape_links.rds"))) { + shape.links <- readRDS(paste0(outputLocation, "shape_links.rds")) + } else { + shape.links <- NA + } # We run into trouble if the geometry column is 'geom' instead of 'GEOMETRY' stops <- stops %>% st_set_geometry("geom") @@ -52,38 +63,44 @@ addGtfsLinks <- function(outputLocation, trips, routes, stopTable, - outputCrs + shape.links, + outputCrs, + onroadBus, + city ) return(edgesCombined) } -processGtfs <- function(outputLocation="./test/", +processGtfs <- function(outputLocation = "./output/generated_network/gtfs/", + nodes, + links, networkNodes, gtfs_feed, - analysis_start, - analysis_end, - studyRegion=NA, - outputCrs){ - # outputLocation="./output/generated_network/gtfs/" + analysis_date, + studyRegion = NA, + outputCrs, + onroadBus){ + + # outputLocation ="./output/generated_network/gtfs/" # networkNodes = validRoadNodes # gtfs_feed = "./data/gtfs.zip" - # analysis_start = as.Date("2023-11-13","%Y-%m-%d") - # analysis_end = as.Date("2023-11-19","%Y-%m-%d") - + # analysis_date = as.Date("2023-11-15","%Y-%m-%d") + #dir.create(outputLocation, showWarnings = FALSE) gtfs <- read_gtfs(gtfs_feed) - # if calendar uses integers for dates, convert start and end (eg 2023-11-13 to 20231113) + analysis_day <- tolower(weekdays(analysis_date)) + + # if calendar uses integers for dates, convert dates (eg 2023-11-15 to 20231115) if (typeof(gtfs$calendar$start_date) == "integer") { - analysis_start <- as.integer(gsub("-", "", analysis_start)) - analysis_end <- as.integer(gsub("-", "", analysis_end)) + analysis_date <- as.integer(gsub("-", "", analysis_date)) } validCalendar <- gtfs$calendar %>% - filter(start_date<=analysis_end & end_date>=analysis_start) %>% - filter(wednesday==1) - + filter(start_date<=analysis_date & end_date>=analysis_date) %>% + filter(.data[[analysis_day]] == 1) + # trips during within the time period validTrips <- gtfs$trips %>% filter(service_id %in% validCalendar$service_id) %>% @@ -123,7 +140,6 @@ processGtfs <- function(outputLocation="./test/", arrange(trip_id,stop_sequence) ) - # stops that have a valid stopTime validStops <- gtfs$stops %>% mutate(stop_id=as.factor(stop_id)) %>% @@ -134,48 +150,108 @@ processGtfs <- function(outputLocation="./test/", st_snap_to_grid(1) # only want stops within the study region - if(!is.na(studyRegion) && length(studyRegion) > 0){ - message("Cropping to study region") + if(!is.na(studyRegion)[1]) { + message("Cropping GTFS to study region") validStops <- validStops %>% filter(lengths(st_intersects(., studyRegion)) > 0) } - # st_write(validStops,"stops.sqlite",delete_layer=TRUE) - # snapping the stops to the nearest node in the road 
network - networkNodes <- networkNodes %>% - mutate(tmp_id=row_number()) - nearestNodeId <- st_nearest_feature(validStops,networkNodes) + # remove any duplicate stop id's (which shouldn't exist); they may have + # different geometries; just keep the first of any duplicates + validStops <- validStops %>% group_by(stop_id) %>% slice(1) %>% ungroup() + + # add service type, by joining stop times, trips and routes (note: one stop + # may have more than one service, eg combined train and vline bus) + serviceTable <- validStopTimes %>% + dplyr::select(stop_id, trip_id) %>% + distinct() %>% + left_join(., validTrips %>% dplyr::select(trip_id, route_id) %>% distinct(), + by = "trip_id") %>% + left_join(., validRoutes, by = "route_id") %>% + distinct(stop_id, service_type) + validStops <- validStops %>% + left_join(serviceTable, by = "stop_id") + + if (onroadBus & "shapes" %in% names(gtfs)) { + + echo("Finding potential bus stop locations within 100m of GTFS shapes\n") + + # make subnetwork of nodes and links within buffered shapes + shape.subnetwork <- makeShapeSubnetwork(gtfs, + nodes, + links, + validRoutes, + studyRegion, + outputCrs) + shape.nodes <- shape.subnetwork[[1]] + shape.links <- shape.subnetwork[[2]] + + # write shape.links to file for use when finding routes + saveRDS(shape.links, file=paste0(outputLocation, "shape_links.rds")) + + # only shape.nodes are used for snapping bus stops + networkNodesBus <- networkNodes %>% + filter(id %in% shape.nodes$id) + + } else { + + if (onroadBus & !"shapes" %in% names(gtfs)) { + message("No shapes file present in GTFS feed, so unable to convert shapes to routes; will make pseudo links for bus instead") + } + + # all nodes can be used for snapping bus stops + networkNodesBus <- networkNodes + } + + # divide into bus and non-bus (but, if onroadBus = F, they will be processed the same way, + # because 'networkNodesBus' will be the same as 'networkNodes') + validStopsBus <- validStops %>% filter(service_type == "bus") + validStopsOther <- validStops %>% filter(service_type != "bus") + validStopIds <- as.character(c(validStopsBus$stop_id, validStopsOther$stop_id)) + serviceTypes <- as.character(c(validStopsBus$service_type, validStopsOther$service_type)) + + # snapping the stops to the nearest node in the road network + nearestNodeIdBus <- st_nearest_feature(validStopsBus, networkNodesBus) + nearestNodeIdOther <- st_nearest_feature(validStopsOther, networkNodes) + # subsetting the networkNodes and rearranging to match validStops - nearestNode <- networkNodes[nearestNodeId,] + nearestNodeBus <- networkNodesBus[nearestNodeIdBus, ] + nearestNodeOther <- networkNodes[nearestNodeIdOther, ] + nearestNode <- rbind(nearestNodeBus, nearestNodeOther) # calculating the distance from each stop to the nearest node in the road network - distanceToNetwork <- st_distance(validStops,nearestNode,by_element=TRUE) %>% + distanceToNetworkBus <- st_distance(validStopsBus, nearestNodeBus, by_element=TRUE) %>% as.numeric() - + distanceToNetworkOther <- st_distance(validStopsOther, nearestNodeOther, by_element=TRUE) %>% + as.numeric() + distanceToNetwork <- c(distanceToNetworkBus, distanceToNetworkOther) validStopsSnapped <- nearestNode %>% - mutate(stop_id=validStops$stop_id) %>% - mutate(dist=distanceToNetwork) %>% - filter(dist<=1000) %>% - dplyr::select(stop_id,id,x,y) #'stop_id' is the gtfs id, 'id' is the network node id - - - # st_write(validStopsSnapped,paste0(outputLocation,"stopsSnapped.sqlite"),delete_layer=TRUE) - + mutate(stop_id = validStopIds, + service_type = 
serviceTypes, + dist = distanceToNetwork) %>% + filter(dist <= 1000) %>% + dplyr::select(stop_id, id, x, y, service_type) # 'stop_id' is the gtfs id, 'id' is the network node id + validStopTimesSnapped <- validStopTimes %>% - inner_join(st_drop_geometry(validStopsSnapped),by="stop_id") %>% # IMPORTANT: this join also removes the stops outside of the region! + # join service details + left_join(validTrips %>% dplyr::select(trip_id, route_id) %>% distinct(), + by = "trip_id") %>% + left_join(validRoutes, by = "route_id") %>% + # join stop locations + inner_join(st_drop_geometry(validStopsSnapped), + by = c("stop_id", "service_type")) %>% # IMPORTANT: this join also removes the stops outside of the region! arrange(trip_id,stop_sequence) %>% group_by(trip_id) %>% # we want at least 2 stops for each trip filter(n() >= 2) %>% # when we use the snapped locations, two sequential stops may be at the same # location. If this is the case, we remove the later stop. - # mutate(bad=ifelse(id==lag(id) & row_number()>1,T,F)) %>% filter(id!=lag(id) | row_number()==1) %>% mutate(stop_sequence=row_number()) %>% ungroup() %>% - dplyr::select(trip_id,stop_sequence,arrival_time,departure_time,stop_id,id,x,y) + dplyr::select(trip_id,stop_sequence,arrival_time,departure_time,stop_id,id,x,y,service_type) # some trips will no longer be present validTripsSnapped <- validTrips %>% @@ -188,16 +264,11 @@ processGtfs <- function(outputLocation="./test/", # table of GTFS railway station stop IDs and node ids (for later matching to patronage data) stopTable <- validStopsSnapped %>% st_drop_geometry() %>% - dplyr::select(stop_id, id) %>% - rename(gtfs_stop_id = stop_id, node_id = id) %>% #<<<% + rename(gtfs_stop_id = stop_id, node_id = id) %>% left_join(., gtfs$stops, by = c("gtfs_stop_id" = "stop_id")) %>% - # filter to rows containing 'Railway Station' and not '/' (used for bus or tram stops at stations) - filter(grepl("Railway Station", stop_name) & !grepl("/", stop_name)) %>% - # replace the pattern 'space + Railway + any number of other characters' with nothing - mutate(station_name = gsub(" Railway.*","", stop_name)) %>% - dplyr::select(station_name, gtfs_stop_id, node_id) - - + dplyr::select(gtfs_stop_id, node_id, service_type, stop_name) + # replace stop_id with id (i.e., use the network node id instead of the stop # id provided by the GTFS feed) validStopsSnappedFinal <- validStopsSnapped %>% @@ -206,7 +277,7 @@ processGtfs <- function(outputLocation="./test/", slice_head() %>% ungroup() %>% rename(stop_id=id) - + validStopTimesSnappedFinal <- validStopTimesSnapped %>% dplyr::select(-stop_id) %>% rename(stop_id=id) @@ -228,8 +299,21 @@ exportGtfsSchedule <- function(links, trips, routes, stopTable, - outputCrs){ - + shape.links, + outputCrs, + onroadBus, + city){ + + # flag for whether buses are routed onroad (requires 'shape.links' to be created in 'processGtfs') + if (onroadBus & !is.na(shape.links)[1]) { + onroadBusRouting = T + } else { + if (onroadBus & is.na(shape.links)[1]) { + message("Subnetwork of links for bus routing has not been created; will make pseudo links for bus instead") + } + onroadBusRouting = F + } + # duplicate stopTimes where arrrival time is at or after 24:00:00, so timetable contains early morning entries earlyMorningStopTimes <- stopTimes %>% filter(departure_time >= 86400) %>% # 86400 is the number of seconds in 24 hours @@ -246,29 +330,48 @@ exportGtfsSchedule <- function(links, # combine with early morning stop times stopTimes <- bind_rows(stopTimes, earlyMorningStopTimes) + # if 
routing buses on road, then find routes between pairs of nodes along the routes + # note - 'shape.links' will not be NA if onroadBus=T and there are gtfs shapes + if (onroadBusRouting) { + system.time(nodePairRoutes <- findNodePairRoutes(stopTimes, + trips, + routes, + shape.links, + existingNodePairs = NA)) + + system.time(unroutedStopOutputs <- removeUnroutedStops(stopTimes, + trips, + routes, + shape.links, + nodePairRoutes)) + stopTimes <- unroutedStopOutputs[[1]] + nodePairRoutes <- unroutedStopOutputs[[2]] + + saveRDS(nodePairRoutes, file=paste0(outputLocation,"nodePairRoutes.rds")) + } + + ### Done to here for both Melbourne and Bendigo # the public transport network ptNetwork <- stopTimes %>% - dplyr::select(trip_id,arrival_time,departure_time,from_id=stop_id,from_x=x,from_y=y, trip_id_orig) %>% - # filter(row_number()<200) %>% + dplyr::select(trip_id,arrival_time,departure_time,from_id=stop_id,from_x=x,from_y=y, trip_id_orig, service_type) %>% group_by(trip_id) %>% mutate(arrivalOffset=arrival_time-min(arrival_time)) %>% mutate(departureOffset=departure_time-min(arrival_time)) %>% + # stop requires a related link; stop can only be served by vehicles driving on that link mutate(to_id=lead(from_id), to_x=lead(from_x), to_y=lead(from_y)) %>% - # the last to node should be the first from node (to make a loop) + # for the last node in the trip, the to_id is the same as the from_id, so the related 'link' is a point mutate(to_id=ifelse(is.na(to_id),from_id,to_id)) %>% mutate(to_x=ifelse(is.na(to_x),from_x,to_x)) %>% mutate(to_y=ifelse(is.na(to_y),from_y,to_y)) %>% - # filter(from_id==to_id) ungroup() %>% mutate(arrivalOffset=as.character(as_hms(arrivalOffset)), departureOffset=as.character(as_hms(departureOffset)), arrival_time=as.character(as_hms(arrival_time)), departure_time=as.character(as_hms(departure_time))) %>% - # join trips and routes, so that service_type (from routes) can be used in stop_id + # join trips, to obtain route_id left_join(., trips, by = c("trip_id_orig" = "trip_id")) %>% - left_join(., routes, by = "route_id") %>% as.data.frame() vehicleTripMatching <- ptNetwork %>% @@ -296,34 +399,66 @@ exportGtfsSchedule <- function(links, mutate(departure_id=formatC(row_number(),digits=0,width=2,flag="0",format="d")) %>% ungroup() %>% dplyr::select(trip_id,route_id_new,departure_id,departure_time=arrival_time) %>% - arrange(route_id_new,departure_id) %>% + # arrange(route_id_new,departure_id) %>% + arrange(route_id_new,departure_time) %>% as.data.frame() - ptNetwork_StopsAndEdges <- ptNetwork %>% + # network of stops + ptNetwork_Stops <- ptNetwork %>% dplyr::select(from_id,to_id,from_x,from_y,to_x,to_y, service_type) %>% distinct() %>% filter(!is.na(to_id)) %>% mutate(geom=paste0("LINESTRING(",from_x," ",from_y,",",to_x," ",to_y,")")) %>% st_as_sf(wkt = "geom", crs = outputCrs) %>% - mutate(stop_id=paste0("Stop ", service_type, " ", formatC(row_number(),digits=0,width=5,flag="0",format="d"))) %>% - mutate(link_id=paste0("pt_",formatC(row_number(),digits=0,width=5,flag="0",format="d"))) + # stop_id naming convention: stop_id is in the form 'mode_x_y', where 'mode' is + # bus, train or tram, 'x' is the node of the stop, and 'y' is the node of the + # next stop (and for the last stop, x and y are the same) + mutate(stop_id = paste0(service_type, "_", from_id, "_", to_id)) + + # add edges to network + # link_id naming convention: for pseudo links: link_id is in the form 'x_y', + # where 'x' is the node of the from_stop, and 'y' is the node of the to_stop + # (and for the last 
stop, x and y are the same); for on-road links, it's the + # first link in the chain of links between the stops + if (onroadBusRouting) { + ptNetwork_StopsAndEdges <- ptNetwork_Stops %>% + # join the chains of links ('link_ids') between pairs of nodes + left_join(nodePairRoutes, + by = c("from_id" = "stop_id", "to_id" = "next_stop_id")) %>% + # remove any link_ids that aren't bus routes (eg when train and bus both run between same pair) + mutate(link_ids = ifelse(service_type != "bus", NA, link_ids)) %>% + # link id is the first link in the chain of link_ids + rowwise() %>% + mutate(link_id = case_when( + service_type == "bus" & from_id != to_id ~ unlist(str_split(link_ids, ", "))[1], + TRUE ~ paste0(from_id, "_", to_id) + )) %>% + ungroup() + } else { + ptNetwork_StopsAndEdges <- ptNetwork_Stops %>% + mutate(link_id = paste0(from_id, "_", to_id)) + } ptNetworkRoutes <- ptNetwork %>% inner_join(ptNetworkDepartures%>%group_by(route_id_new)%>% slice(which.min(departure_id))%>%dplyr::select(trip_id,route_id_new), by="trip_id") %>% dplyr::select(route_id_new,from_id,to_id,arrivalOffset,departureOffset, service_type) %>% - inner_join(ptNetwork_StopsAndEdges%>%st_drop_geometry()%>%dplyr::select(from_id,to_id,stop_id,link_id,service_type), + inner_join(ptNetwork_StopsAndEdges%>%st_drop_geometry()%>% + dplyr::select(from_id,to_id,stop_id,any_of("link_ids"),link_id,service_type), by=c("from_id","to_id", "service_type")) %>% - dplyr::select(route_id_new,arrivalOffset,departureOffset,stop_id,link_id) + dplyr::select(route_id_new,arrivalOffset,departureOffset,stop_id,link_id,any_of("link_ids"),service_type) - # adding Stop numbers to railway station stop table - ptNetwork_StopsAndEdges_Rail <- ptNetwork_StopsAndEdges %>% - filter(service_type == "train") - + # adding Stop numbers to railway station stop table + nodeStopIds <- ptNetwork_StopsAndEdges %>% + dplyr::select(from_id, stop_id) %>% + st_drop_geometry() %>% + distinct() %>% + group_by(from_id) %>% + summarise(stop_ids_for_node = paste(stop_id, collapse = ", ")) %>% + ungroup() stopTable <- stopTable %>% - left_join(., ptNetwork_StopsAndEdges_Rail, by = c("node_id" = "from_id")) %>% - dplyr::select(station_name, gtfs_stop_id, node_id, stop_id) + left_join(nodeStopIds, by = c("node_id" = "from_id")) write.csv(stopTable, file=paste0(outputLocation, "stopTable.csv")) @@ -350,7 +485,7 @@ exportGtfsSchedule <- function(links, # * route is the same as the refID column since we use a direct line between each stop. routeProfile <- ptNetworkRoutes %>% dplyr::select(transitRouteId=route_id_new, refId=stop_id,arrivalOffset, - departureOffset,linkRefId=link_id) + departureOffset,linkRefId=link_id, any_of("link_ids"),service_type) # ./data/transitSchedule.xml: transitSchedule > transitRoute > departures # vehicleRefId is just the trip_id. 
This means we can potentially have a @@ -358,7 +493,8 @@ exportGtfsSchedule <- function(links, departures <- ptNetworkDepartures %>% # mutate(departure_time=as.character(as.hms(departure_time))) %>% left_join(vehicles, by=c("trip_id"="id")) %>% - arrange(route_id_new,departure_id,service_type) %>% + # arrange(route_id_new,departure_id,service_type) %>% + arrange(route_id_new,departure_time,service_type) %>% group_by(service_type) %>% mutate(type=NA, type=ifelse(service_type=="train",1,type), @@ -367,7 +503,7 @@ exportGtfsSchedule <- function(links, mutate(vehicleRefId=paste0(service_type,"_",formatC(row_number(),digits=0,width=5,flag="0",format="d"))) %>% ungroup() %>% dplyr::select(transitRouteId=route_id_new,departureId=departure_id, - departureTime=departure_time,vehicleRefId,type) %>% + departureTime=departure_time,vehicleRefId,type,serviceType=service_type) %>% as.data.frame() # Types of vehicles to place in the network - TO BE REVIEWED @@ -443,7 +579,7 @@ exportGtfsSchedule <- function(links, if (i%%50==0 || i==nrow(transitStops)) printProgress(i,nrow(transitStops),' transitStops') } cat(paste0(" \n"),file=outxml,append=TRUE) - cat(paste0(" \n"),file=outxml,append=TRUE) + cat(paste0(" \n"),file=outxml,append=TRUE) echo("writing vehicleTripMatching\n") str<-"" @@ -451,14 +587,14 @@ exportGtfsSchedule <- function(links, transitRoutes<-routeProfile$transitRouteId%>%unique()%>%sort() - for (i in 1:length(transitRoutes)) { + for (i in 1:length(transitRoutes)) { # for (i in 1:100) { routeProfileCurrent <- routeProfile[routeProfile$transitRouteId==transitRoutes[i],] departuresCurrent <- departures[departures$transitRouteId==transitRoutes[i],] if(nrow(routeProfileCurrent)>1){ # I added this to drop those empty route profiles str<-paste0(str," \n") str<-paste0(str," ",departuresCurrent[1,]$type,"\n") - str<-paste0(str," pt\n") + str<-paste0(str," ",departuresCurrent[1,]$serviceType,"\n") ### HERE str<-paste0(str," \n") for (j in 1:nrow(routeProfileCurrent)) { @@ -493,9 +629,24 @@ exportGtfsSchedule <- function(links, str<-paste0(str," \n") str<-paste0(str," \n") for (j in 1:nrow(routeProfileCurrent)) { - str<-paste0(str," \n") + if (onroadBusRouting) { + if (!is.na(routeProfileCurrent[j,]$link_ids)) { + link_ids <- unlist(str_split(routeProfileCurrent[j,]$link_ids, ", ")) + for (k in 1:length(link_ids)) { + str<-paste0(str," \n") + } + } else { + str<-paste0(str," \n") + } + } else { + str<-paste0(str," \n") + } } str<-paste0(str," \n") @@ -523,31 +674,288 @@ exportGtfsSchedule <- function(links, cat(paste0(" \n"),file=outxml,append=TRUE) cat(paste0("\n"),file=outxml,append=TRUE) - - - # routeProfile, stop - # route, link - # departures, departure - ptNetworkMATSim <- ptNetwork_StopsAndEdges %>% + # edges to be added to road network + if (onroadBusRouting) { + ptBaseEdges <- ptNetwork_StopsAndEdges %>% + # remove bus segments where onroad links are used (that is, where link_ids are present) + filter(is.na(link_ids)) + } else { + ptBaseEdges <- ptNetwork_StopsAndEdges + } + ptNetworkMATSim <- ptBaseEdges %>% mutate(length=round(as.numeric(st_length(.)),3)) %>% mutate(length=ifelse(length<1,1,length)) %>% - mutate(highway="pt") %>% + mutate(highway=service_type) %>% mutate(freespeed=11.1) %>% mutate(permlanes=1) %>% mutate(capacity=600) %>% mutate(is_oneway=1) %>% mutate(cycleway=NA) %>% + mutate(surface=NA) %>% + mutate(slope_pct=NA) %>% mutate(is_cycle=0) %>% mutate(is_walk=0) %>% mutate(is_car=0) %>% - mutate(modes="pt") %>% - dplyr::select(id=link_id, from_id, to_id, fromX=from_x, 
fromY=from_y, toX=to_x, toY=to_y, - length, freespeed, permlanes, capacity, highway, is_oneway, - cycleway, is_cycle, is_walk, is_car, modes) + mutate(modes=service_type) %>% + mutate(link_id = max(links$link_id) + row_number()) %>% + dplyr::select(from_id, to_id, fromx=from_x, fromy=from_y, tox=to_x, toy=to_y, + length, freespeed, permlanes, capacity, highway, cycleway, + surface, is_cycle, is_walk, is_car, modes, slope_pct, link_id) edgesCombined <- bind_rows(links,ptNetworkMATSim) %>% - st_sf() %>% - mutate(cycleway=as.character(cycleway)) + st_sf() + + # add 'bus' to modes for onroad buses + if (onroadBusRouting) { + buslinks <- c() + for (i in 1:nrow(nodePairRoutes)) { + row.links <- nodePairRoutes$link_ids[i] %>% + str_split(., ", ") %>% + unlist() %>% + as.numeric() + buslinks <- c(buslinks, row.links) + } + buslinks <- unique(buslinks) + edgesCombined <- edgesCombined %>% + mutate(modes = ifelse(link_id %in% buslinks, paste0(modes, ",bus"), modes)) + } return(edgesCombined) } + + +# function to make subnetwork of nodes and links falling within a 50m buffer of +# 'shapes' in the GTFS feed used on valid routes - only used where onroadBus = T +makeShapeSubnetwork <- function(gtfs, + nodes, + links, + validRoutes, + studyRegion = NA, + outputCrs) { + + # convert shapes to sf, and filter to bus + shapes <- gtfs_as_sf(gtfs) %>% + .$shapes %>% + st_transform(outputCrs) %>% + st_snap_to_grid(1) + + # limit to study area (note - this crops shapes at edge of study area) + if (!is.na(studyRegion)[1]) { + shapes <- shapes %>% + st_intersection(., studyRegion %>% st_geometry()) + } + + # filter to bus shapes only + bus.shapes <- shapes %>% + # bus only + left_join(gtfs$trips %>% dplyr::select(shape_id, route_id) %>% distinct(), + by = "shape_id") %>% + left_join(validRoutes, by = "route_id") %>% + filter(service_type == "bus") + + # buffer shapes to 100m - routes and stops must be within this buffer + buffered.shapes <- bus.shapes %>% + st_buffer(., 100) %>% + summarise() + + # select links and nodes within the buffered shapes, and that meet the following tests: + # - for links - must be driveable, so buses can traverse them + # - for nodes - must be accessible by car, bike and walking (but this filter will be done when snapping stops) + intersecting.nodes <- nodes %>% # all nodes + dplyr::select(id) %>% + st_filter(buffered.shapes, .predicate = st_intersects) + + shape.links <- links %>% + filter(from_id %in% intersecting.nodes$id & to_id %in% intersecting.nodes$id) %>% + # must be driveable + filter(is_car == 1) %>% + # must not be dead end + filter(from_id %in% .$to_id & to_id %in% .$from_id) + + # make a graph of the shape links, and remove any small sections (< 10 nodes) + g <- graph_from_data_frame(shape.links %>% + dplyr::select(from_id, to_id), + directed = T) + components <- components(g) + component_sizes <- components$csize + small_components <- which(component_sizes < 10) + # nodes in the small components + small_component_nodes <- c() + for (i in 1:length(small_components)) { + small_component_nodes <- c(small_component_nodes, + V(g)[which(components$membership == small_components[i])] %>% + as_ids()) + } + # remove links with a from_id or to_id in the small components + shape.links <- shape.links %>% + filter(!from_id %in% small_component_nodes | !to_id %in% small_component_nodes) + + shape.nodes <- nodes %>% + filter(id %in% shape.links$from_id | id %in% shape.links$to_id) + + return(list(shape.nodes, shape.links)) + +} + + +# function to find route between pairs of nodes 
representing adjacent stops on
+# a bus route - only used where onroadBus = T
+findNodePairRoutes <- function(stopTimes,
+                               trips,
+                               routes,
+                               shape.links,
+                               existingNodePairs = NA) {
+
+  nodePairs <- stopTimes %>%
+    # filter to bus
+    filter(trip_id %in% (trips %>%
+                           left_join(routes, by = "route_id") %>%
+                           filter(service_type == "bus") %>%
+                           .$trip_id)) %>%
+    # add next stop ID if it's the same trip (note - these 'stop_ids' are node id's not gtfs id's)
+    mutate(next_stop_id = ifelse(trip_id == lead(trip_id),
+                                 lead(stop_id), NA)) %>%
+    distinct(stop_id, next_stop_id) %>%
+    filter(!is.na(next_stop_id))
+
+  # remove any that have already been found
+  if (!is.na(existingNodePairs)[1]) {
+    nodePairs <- nodePairs %>%
+      filter(!(paste0(stop_id, "_", next_stop_id) %in%
+                 (existingNodePairs %>%
+                    mutate(node_pair = paste0(stop_id, "_", next_stop_id)) %>%
+                    .$node_pair)))
+
+  }
+
+  if (nrow(nodePairs) > 0) {
+    # directed graph of shape nodes and links
+    shape.g <-
+      graph_from_data_frame(shape.links %>%
+                              dplyr::select(from_id, to_id, weight = length, link_id),
+                            # dplyr::select(from_id, to_id, weight = length/freespeed, link_id),  # time rather than length
+                            directed = T)
+
+    # setup for parallel processing and progress reporting
+    cores <- detectCores()
+    cluster <- parallel::makeCluster(cores)
+    doSNOW::registerDoSNOW(cluster)
+    pb <- txtProgressBar(max = max(nrow(nodePairs), 2), style = 3)
+    progress <- function(n) setTxtProgressBar(pb, n)
+    opts <- list(progress = progress)
+
+    # report
+    echo(paste("Finding routes for", nrow(nodePairs), "pairs of bus stop nodes;",
+               "parallel processing with", cores, "cores\n"))
+
+    # loop to find the route for each pair of stop nodes
+    nodePairRoutes <-
+      foreach(i = 1:nrow(nodePairs),
+      # foreach(i = 21280:21300,
+              .combine = rbind,
+              .packages = c("dplyr", "sf", "igraph"),
+              .options.snow = opts) %dopar% {
+
+                row <- nodePairs[i, ]
+
+                shortest <- shortest_paths(shape.g,
+                                           from = as.character(row$stop_id),
+                                           to = as.character(row$next_stop_id),
+                                           mode = "out",
+                                           output = "epath")
+
+                if (length(shortest$epath[[1]]) > 0) {
+                  shortest_link_ids <- edge_attr(shape.g, "link_id", shortest$epath[[1]]) %>%
+                    toString()
+                } else {
+                  shortest_link_ids <- NA
+                }
+
+                output.row <- cbind(row, link_ids = shortest_link_ids) %>%
+                  as.data.frame()
+
+                return(output.row)
+              }
+
+    # close the progress bar and cluster
+    close(pb)
+    stopCluster(cluster)
+
+  } else {
+    nodePairRoutes <- c()
+  }
+
+  return(nodePairRoutes)
+
+}
+
+# function to remove unrouted bus stops from stopTimes - only used where onroadBus = T
+removeUnroutedStops <- function(stopTimes,
+                                trips,
+                                routes,
+                                shape.links,
+                                nodePairRoutes) {
+
+  # find missing pair routes
+  missingPairRoutes <- nodePairRoutes %>% filter(is.na(link_ids))
+
+  # loop to remove stops where unrouted
+  counter = 0
+  while (nrow(missingPairRoutes) > 0) {
+
+    echo(paste("Removing stops for", nrow(missingPairRoutes), "pairs of stops for which routes can't be found\n"))
+
+    stopTimes <- stopTimes %>%
+      mutate(missing = 0)
+
+    # identify stops to which a route can't be found - first time, end of the
+    # missing link; second time, start of the missing link, and so on
+    for (i in 1:nrow(missingPairRoutes)) {
+      if (counter %% 2 == 0) {
+        stopTimes <- stopTimes %>%
+          mutate(missing = ifelse((stop_id == missingPairRoutes$next_stop_id[i] &
+                                     lag(stop_id) == missingPairRoutes$stop_id[i] &
+                                     trip_id == lag(trip_id)), 1, missing))
+      } else {
+        stopTimes <- stopTimes %>%
+          mutate(missing = ifelse((stop_id == missingPairRoutes$stop_id[i] &
+                                     lead(stop_id) == 
missingPairRoutes$next_stop_id[i] &
+                                     trip_id == lead(trip_id)), 1, missing))
+      }
+    }
+
+    # remove where missing and re-do stop sequences
+    stopTimes <- stopTimes %>%
+      filter(missing != 1) %>%
+      group_by(trip_id) %>%
+      # remove any with fewer than 2 stops for the trip
+      filter(n() >= 2) %>%
+      # stop sequence
+      mutate(stop_sequence = row_number()) %>%
+      ungroup()
+
+    # remove the missing pairs from nodePairRoutes
+    nodePairRoutes <- nodePairRoutes %>%
+      filter(!is.na(link_ids))
+
+    # find routes for new node pairs
+    newNodePairs <- findNodePairRoutes(stopTimes,
+                                       trips,
+                                       routes,
+                                       shape.links,
+                                       existingNodePairs = nodePairRoutes)
+
+    # add new pairs to output, and recalculate missing pair routes
+    nodePairRoutes <- rbind(nodePairRoutes, newNodePairs)
+    missingPairRoutes <- nodePairRoutes %>% filter(is.na(link_ids))
+
+    # increment counter
+    counter = counter + 1
+  }
+
+  if ("missing" %in% names(stopTimes)) {
+    stopTimes <- stopTimes %>% dplyr::select(-missing)
+  }
+
+  return(list(stopTimes, nodePairRoutes))
+}
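Reduced to a self-contained sketch, the routing core added by this patch works as follows (the three-link toy network and its ids below are invented for illustration; findNodePairRoutes applies the same shortest_paths/edge_attr calls to every adjacent stop pair, in parallel): build a directed graph of driveable links weighted by length, then read off the chain of link_ids between two stop nodes.

library(dplyr)
library(igraph)

# toy network: two short links via node 2, plus one long direct link
toy.links <- tribble(
  ~from_id, ~to_id, ~length, ~link_id,
  "1",      "2",    120,     101,
  "2",      "3",     80,     102,
  "1",      "3",    500,     103
)

# first two columns become the edge endpoints; the 'weight' attribute is used by shortest_paths
toy.g <- graph_from_data_frame(toy.links %>% rename(weight = length),
                               directed = TRUE)

shortest <- shortest_paths(toy.g, from = "1", to = "3", mode = "out", output = "epath")
edge_attr(toy.g, "link_id", shortest$epath[[1]]) %>% toString()  # "101, 102" - via node 2, not the long direct link

From cf619d7ac8d48c30161e9a4a4e5124c0cb69d Mon Sep 17 00:00:00 2001
From: StevePem
Date: Wed, 21 Feb 2024 20:01:29 +1100
Subject: [PATCH 085/103] Update to activate NDVI functionality

---
 NetworkGenerator.R  | 12 ++++++------
 data/README.md      |  6 ++++--
 functions/addNDVI.R | 11 ++++++-----
 3 files changed, 16 insertions(+), 13 deletions(-)

diff --git a/NetworkGenerator.R b/NetworkGenerator.R
index 90cca20..0978886 100644
--- a/NetworkGenerator.R
+++ b/NetworkGenerator.R
@@ -21,8 +21,7 @@ makeNetwork<-function(city, outputSubdirectory = "generated_network"){
   #     (only supported for Victoria at this stage)
   # •   demFile: if 'addElevation=T', digital elevation model raster file (must be
   #     in same coordinate system as network)
-  # •   ndviFile: if 'addNDVI=T', raster file with NDVI values (must be in same
-  #     coordinate system as network)
+  # •   ndviFile: if 'addNDVI=T', raster file with NDVI values
   # •   gtfs_feed: if 'addGtfs=T' or 'addDestinationLayer=T, zip file containing
   #     GTFS data (and, if 'addGtfs=T', also set start and end dates in GTFS section)

@@ -33,7 +32,7 @@ makeNetwork<-function(city, outputSubdirectory = "generated_network"){
     unconfiguredSqlite = "./output/bendigo_network_unconfigured.sqlite"
     cropAreaPoly = ""  # must set 'crop2Area=F'
     demFile = "./data/dem_bendigo.tif"
-    # ndviFile = ""  # must set 'addNDVI=F'
+    ndviFile = "./data/NDVI_Bendigo_2023.tif"
    gtfs_feed = "./data/gtfs.zip"

   } else if (city == "Melbourne") {
@@ -43,7 +42,7 @@ makeNetwork<-function(city, outputSubdirectory = "generated_network"){
     unconfiguredSqlite = "./output/melbourne_network_unconfigured.sqlite"
     cropAreaPoly = "city-of-melbourne_victoria"
     demFile = "./data/dem_melbourne.tif"
-    # ndviFile = "./data/NDVI_1600mBuffer_Melbourne_reprojected.tif"
+    ndviFile = "./data/NDVI_Melbourne_2023.tif"
     gtfs_feed = "./data/gtfs.zip"

   } else {
@@ -92,7 +91,7 @@ makeNetwork<-function(city, outputSubdirectory = "generated_network"){

   # NDVI
   # A flag for whether to add NDVI or not
-  addNDVI=F
+  addNDVI=T
   # Buffer distance for finding average NDVI for links
   ndviBuffDist=30

@@ -284,7 +283,8 @@ makeNetwork<-function(city, outputSubdirectory = "generated_network"){
   if(addNDVI) {
     system.time(networkDensified[[2]] <- addNDVI2Links(networkDensified[[2]],
                                                        ndviFile,
-                                                       ndviBuffDist))
+                                                       ndviBuffDist,
+                                                       outputCrs))
   }

   # adding destinations layer
diff --git a/data/README.md b/data/README.md
index 17c34a9..bc9753a 100644
--- a/data/README.md
+++ b/data/README.md
@@ -11,7 +11,7 @@ Download the following files for the relevant network 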
location. |--------------------------|---------------------------------------------------| | greater_bendigo.sqlite | Boundary of the Greater Bendigo Local Government Area | | dem_bendigo.tif | Digital elevation model data for the Greater Bendigo area | -| [to come] | [NDVI] data for the Greater Bendigo area | +| NDVI_Bendigo_2023.tif | NDVI data for the Greater Bendigo area | | gtfs.zip | GTFS feed for Victoria as at 20 October 2023 | @@ -20,7 +20,7 @@ Download the following files for the relevant network location. |--------------------------|---------------------------------------------------| | greater_melbourne.sqlite | Boundary of the Greater Melbourne Greater Capital City Statistical Area | | dem_melbourne.tif | Digital elevation model data for the Greater Melbourne area | -| [to come] | [NDVI] data for the Greater Melbourne area | +| NDVI_Melbourne_2023.tif | NDVI data for the Greater Melbourne area | | gtfs.zip | GTFS feed for Victoria as at 20 October 2023 | @@ -38,3 +38,5 @@ The file `data/data prep tools.R` contains: * the script used to extract the region boundary files from the LGA and GCCSA files above, and * the script used to crop the digital elevation files from a DEM file for the whole of Victoria (available for download from https://discover.data.vic.gov.au/dataset/vicmap-elevation-dem-10m, 9.3 GB). Those scripts may also be useful to generate similar data input files for other locations if required. + +The NDVI files were created from Sentinel 2 data using Google Earth Engine at https://code.earthengine.google.com/. diff --git a/functions/addNDVI.R b/functions/addNDVI.R index b730c7e..d02634a 100644 --- a/functions/addNDVI.R +++ b/functions/addNDVI.R @@ -1,17 +1,18 @@ # function to add NDVI to links, where NDVI is the average of the NDVI # values within a 30m buffer of the links -addNDVI2Links <- function(links, ndviFile, ndviBuffDist) { +addNDVI2Links <- function(links, ndviFile, ndviBuffDist, outputCrs) { # links = networkDensified[[2]] - # ndviFile = "./data/NDVI_1600mBuffer_Melbourne_reprojected.tif" + # ndviFile = "./data/NDVI_Bendigo_2023.tif" # ndviBuffDist = 30 - echo("Reading in the NDVI file\n") + echo("Reading in the NDVI file and reprojecting if necessary\n") - # read in NDVI file + # read in NDVI file, and convert to outputCrs if necessary ndvi <- rast(ndviFile) - + if (!same.crs(ndvi, outputCrs)) ndvi <- project(ndvi, outputCrs) + # buffer each link links.buffered <- st_buffer(links, 30) From 966b4bda04348e6e7c2c523de4093a22970184a1 Mon Sep 17 00:00:00 2001 From: StevePem Date: Wed, 21 Feb 2024 20:02:30 +1100 Subject: [PATCH 086/103] Update instructions for using GTFS --- NetworkGenerator.R | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/NetworkGenerator.R b/NetworkGenerator.R index 0978886..8e47227 100644 --- a/NetworkGenerator.R +++ b/NetworkGenerator.R @@ -23,7 +23,7 @@ makeNetwork<-function(city, outputSubdirectory = "generated_network"){ # in same coordinate system as network) # • ndviFile: if 'addNDVI=T', raster file with NDVI values # • gtfs_feed: if 'addGtfs=T' or 'addDestinationLayer=T, zip file containing - # GTFS data (and, if 'addGtfs=T', also set start and end dates in GTFS section) + # GTFS data (and, if 'addGtfs=T', also set analysis date in GTFS section) if (city == "Bendigo") { region = "./data/greater_bendigo.sqlite" From 5b0acb6c400aaba8a591721042ad4dcd7f9ee864 Mon Sep 17 00:00:00 2001 From: StevePem Date: Mon, 26 Feb 2024 14:29:45 +1100 Subject: [PATCH 087/103] correct reprojection of ndvi raster --- 
functions/addNDVI.R | 5 +++--
 1 file changed, 3 insertions(+), 2 deletions(-)

diff --git a/functions/addNDVI.R b/functions/addNDVI.R
index d02634a..de16c5f 100644
--- a/functions/addNDVI.R
+++ b/functions/addNDVI.R
@@ -11,7 +11,8 @@ addNDVI2Links <- function(links, ndviFile, ndviBuffDist, outputCrs) {

   # read in NDVI file, and convert to outputCrs if necessary
   ndvi <- rast(ndviFile)
-  if (!same.crs(ndvi, outputCrs)) ndvi <- project(ndvi, outputCrs)
+  outputCrsEPSG <- paste0("EPSG:", outputCrs)
+  if (!same.crs(ndvi, outputCrsEPSG)) ndvi <- project(ndvi, outputCrsEPSG)

   # buffer each link
   links.buffered <- st_buffer(links, 30)
@@ -22,7 +23,7 @@ addNDVI2Links <- function(links, ndviFile, ndviBuffDist, outputCrs) {
   # 2 columns, ID (which is the row number from links.buffered) and NDVI
   ndvi_values <- terra::extract(ndvi, links.buffered)

-  echo(paste("Finding mean pf NDVI values for each link\n"))
+  echo(paste("Finding mean of NDVI values for each link\n"))

   # find the mean of the values for each link
   ndvi_values_mean <- ndvi_values %>%

From ec112c5cd75af5001f8b8ed952bd41d025c4479e Mon Sep 17 00:00:00 2001
From: StevePem
Date: Mon, 26 Feb 2024 14:30:49 +1100
Subject: [PATCH 088/103] flow multiple ndvi values (if present) through to final output

---
 functions/makeEdgesOneway.R | 5 +++--
 functions/restructureData.R | 3 ++-
 2 files changed, 5 insertions(+), 3 deletions(-)

diff --git a/functions/makeEdgesOneway.R b/functions/makeEdgesOneway.R
index b6e4bb9..56c96aa 100644
--- a/functions/makeEdgesOneway.R
+++ b/functions/makeEdgesOneway.R
@@ -46,8 +46,9 @@ makeEdgesOneway <- function(nodes_current, edges_current) {
   if ("slope_pct" %in% colnames(edges_twoway_reversed)) {
     required_fields <- c(required_fields, "slope_pct")
   }
-  if ("ndvi" %in% colnames(edges_twoway_reversed)) {
-    required_fields <- c(required_fields, "ndvi")
+  ndvi_columns <- colnames(edges_twoway_reversed)[grep("ndvi", colnames(edges_twoway_reversed))]
+  if (length(ndvi_columns) > 0) {
+    required_fields <- c(required_fields, ndvi_columns)
   }
   edges_twoway_reversed <- edges_twoway_reversed %>%
     dplyr::select(all_of(required_fields))
diff --git a/functions/restructureData.R b/functions/restructureData.R
index 827bca7..3e27fe6 100644
--- a/functions/restructureData.R
+++ b/functions/restructureData.R
@@ -68,7 +68,8 @@ restructureData <- function(networkList, highway_lookup,
     dplyr::select(any_of(c("osm_id","from_id", "to_id", "fromX", "fromY", "toX", "toY",
                            "length", "freespeed", "permlanes", "capacity", "highway",
                            "is_oneway", "cycleway", "surface",
-                           "is_cycle", "is_walk", "is_car", "modes", "ndvi"))) %>%
+                           "is_cycle", "is_walk", "is_car", "modes")),
+                  contains("ndvi")) %>%
     mutate(id=NA) %>%
     relocate(id)

From 34c14a5196959dc080a9e6641ab02f5b5d7ebeed Mon Sep 17 00:00:00 2001
From: StevePem
Date: Mon, 26 Feb 2024 14:31:23 +1100
Subject: [PATCH 089/103] flow osm_id through to final output

---
 functions/makeEdgesOneway.R | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/functions/makeEdgesOneway.R b/functions/makeEdgesOneway.R
index 56c96aa..e1afc20 100644
--- a/functions/makeEdgesOneway.R
+++ b/functions/makeEdgesOneway.R
@@ -46,6 +46,9 @@ makeEdgesOneway <- function(nodes_current, edges_current) {
   if ("slope_pct" %in% colnames(edges_twoway_reversed)) {
     required_fields <- c(required_fields, "slope_pct")
   }
+  if ("osm_id" %in% colnames(edges_twoway_reversed)) {
+    required_fields <- c(required_fields, "osm_id")
+  }
   ndvi_columns <- colnames(edges_twoway_reversed)[grep("ndvi", colnames(edges_twoway_reversed))]
   if (length(ndvi_columns) > 0) { 
required_fields <- c(required_fields, ndvi_columns)

From 9497a82e77a69676180be5ba49934795f4e3f14c Mon Sep 17 00:00:00 2001
From: StevePem
Date: Mon, 26 Feb 2024 14:32:39 +1100
Subject: [PATCH 090/103] elevation: switch library from raster to terra; add
 support for reprojection

---
 NetworkGenerator.R       |  4 ++--
 functions/addElevation.R | 24 ++++++++++++++++++++----
 2 files changed, 22 insertions(+), 6 deletions(-)

diff --git a/NetworkGenerator.R b/NetworkGenerator.R
index 8e47227..71a37ed 100644
--- a/NetworkGenerator.R
+++ b/NetworkGenerator.R
@@ -118,7 +118,6 @@ makeNetwork<-function(city, outputSubdirectory = "generated_network"){
   library(tidyverse)
   library(data.table)
   library(igraph)
-  library(raster)
   library(terra)
   library(lwgeom)
   library(tidytransit)
@@ -320,7 +319,8 @@ makeNetwork<-function(city, outputSubdirectory = "generated_network"){
   if(addElevation){
     networkRestructured[[1]] <- addElevation2Nodes(networkRestructured[[1]],
                                                    demFile,
-                                                   ElevationMultiplier)
+                                                   ElevationMultiplier,
+                                                   outputCrs)
     networkRestructured[[2]] <- addElevation2Links(networkRestructured)
   }

diff --git a/functions/addElevation.R b/functions/addElevation.R
index 697b013..89cfe0e 100644
--- a/functions/addElevation.R
+++ b/functions/addElevation.R
@@ -1,8 +1,24 @@
-addElevation2Nodes <- function(nodes, rasterFile, multiplier=1){
-  elevation <- raster(rasterFile)
-  nodes$z <- round(raster::extract(elevation ,as(nodes, "Spatial"),method='bilinear'))/multiplier
+addElevation2Nodes <- function(nodes, rasterFile, multiplier=1, outputCrs){
+
+  # elevation <- raster(rasterFile)
+  # nodes$z <- round(raster::extract(elevation ,as(nodes, "Spatial"),method='bilinear'))/multiplier
+
+  # read in dem file, and convert to outputCrs if necessary
+  elevation <- rast(rasterFile)
+  outputCrsEPSG <- paste0("EPSG:", outputCrs)
+  if (!same.crs(elevation, outputCrsEPSG)) elevation <- project(elevation, outputCrsEPSG)
+
+  # find elevation values, and add to nodes as 'z' field
+  elevation.values <- round(terra::extract(elevation, nodes, method='bilinear', ID = FALSE)) / multiplier
+  names(elevation.values) <- "z"
+  elevation.values <- elevation.values %>%
+    mutate(z = ifelse(is.nan(z), NA, z)) %>%
+    .$z
+
+  nodes$z <- elevation.values
+
   return(nodes)
-}
+}

 addElevation2Links <- function(network){
   # network <- networkRestructured

From 399858d0d131583816a89686c2a2085adaa8abfe Mon Sep 17 00:00:00 2001
From: StevePem
Date: Mon, 26 Feb 2024 16:47:03 +1100
Subject: [PATCH 091/103] increase timeout on OSM extract download, to allow
 for large file/slow connection

---
 functions/getOsmExtract.R | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/functions/getOsmExtract.R b/functions/getOsmExtract.R
index f7f8e89..e13c9d8 100644
--- a/functions/getOsmExtract.R
+++ b/functions/getOsmExtract.R
@@ -17,7 +17,7 @@ getOsmExtract <- function(region,

   # increase timeout to allow time for large Australia extract to download
   default.timeout <- getOption("timeout")
-  options(timeout = 1200)
+  options(timeout = 3600)

   # download the full extract (whole of Australia; quite slow)
   download.url <- oe_match(region, crs = outputCrs)$url

From 82883fff5bada0d55cf1bbf2fa2748dcb8a94a86 Mon Sep 17 00:00:00 2001
From: StevePem
Date: Thu, 29 Feb 2024 16:06:22 +1100
Subject: [PATCH 092/103] change convention for naming PT pseudo links to
 ensure no duplicates

---
 functions/gtfs2PtNetwork.R | 36 ++++++++++++++++++++----------------
 functions/writeOutputs.R   |  7 ++++++-
 2 files changed, 26 insertions(+), 17 deletions(-)

diff --git a/functions/gtfs2PtNetwork.R 
b/functions/gtfs2PtNetwork.R
index d3afdc6..bf5cda4 100644
--- a/functions/gtfs2PtNetwork.R
+++ b/functions/gtfs2PtNetwork.R
@@ -416,28 +416,30 @@ exportGtfsSchedule <- function(links,
     mutate(stop_id = paste0(service_type, "_", from_id, "_", to_id))

   # add edges to network
-  # link_id naming convention: for pseudo links: link_id is in the form 'x_y',
-  # where 'x' is the node of the from_stop, and 'y' is the node of the to_stop
-  # (and for the last stop, x and y are the same); for on-road links, it's the
-  # first link in the chain of links between the stops
+  # link_id naming convention: for pseudo links: link_id is the service type (eg 'train_');
+  # plus a row number; for on-road links, it's the first link in the chain of links between the stops
   if (onroadBusRouting) {
     ptNetwork_StopsAndEdges <- ptNetwork_Stops %>%
       # join the chains of links ('link_ids') between pairs of nodes
       left_join(nodePairRoutes,
                 by = c("from_id" = "stop_id", "to_id" = "next_stop_id")) %>%
-      # remove any link_ids that aren't bus routes (eg when train and bus both run between same pair)
+      # remove any chains that aren't bus routes (eg when train and bus both run between same pair)
       mutate(link_ids = ifelse(service_type != "bus", NA, link_ids)) %>%
-      # link id is the first link in the chain of link_ids
+      # link id: service type plus an identifying number
+      mutate(link_id = paste0(service_type, "_",
+                              formatC(row_number(), digits=0, width=5, flag="0", format="d"))) %>%
+      # update link id for buses: first link in the chain
       rowwise() %>%
-      mutate(link_id = case_when(
-        service_type == "bus" & from_id != to_id ~ unlist(str_split(link_ids, ", "))[1],
-        TRUE ~ paste0(from_id, "_", to_id)
-      )) %>%
+      mutate(link_id = if_else(service_type == "bus" & from_id != to_id,
+                               unlist(str_split(link_ids, ", "))[1],
+                               link_id)) %>%
       ungroup()
-  } else {
+  } else {
     ptNetwork_StopsAndEdges <- ptNetwork_Stops %>%
-      mutate(link_id = paste0(from_id, "_", to_id))
-  }
+      # link id - service type plus an identifying number
+      mutate(link_id = paste0(service_type, "_",
+                              formatC(row_number(), digits=0, width=5, flag="0", format="d")))
+  }

   ptNetworkRoutes <- ptNetwork %>%
     inner_join(ptNetworkDepartures%>%group_by(route_id_new)%>%
@@ -674,7 +676,7 @@ exportGtfsSchedule <- function(links,
   cat(paste0(" \n"),file=outxml,append=TRUE)
   cat(paste0("\n"),file=outxml,append=TRUE)

-  # edges to be added to road network
+  # edges to be added to road network
   if (onroadBusRouting) {
     ptBaseEdges <- ptNetwork_StopsAndEdges %>%
       # remove bus segments where onroad links are used (that is, where link_ids are present)
@@ -682,6 +684,7 @@ exportGtfsSchedule <- function(links,
   } else {
     ptBaseEdges <- ptNetwork_StopsAndEdges
   }
+
   ptNetworkMATSim <- ptBaseEdges %>%
     mutate(length=round(as.numeric(st_length(.)),3)) %>%
     mutate(length=ifelse(length<1,1,length)) %>%
@@ -697,10 +700,11 @@ exportGtfsSchedule <- function(links,
     mutate(is_walk=0) %>%
     mutate(is_car=0) %>%
     mutate(modes=service_type) %>%
-    mutate(link_id = max(links$link_id) + row_number()) %>%
+    mutate(id = link_id) %>%  # the link_id created above
+    mutate(link_id = max(links$link_id) + row_number()) %>%  # row-number link_id, consistent with road links
     dplyr::select(from_id, to_id, fromx=from_x, fromy=from_y, tox=to_x, toy=to_y,
                   length, freespeed, permlanes, capacity, highway, cycleway,
-                  surface, is_cycle, is_walk, is_car, modes, slope_pct, link_id)
+                  surface, is_cycle, is_walk, is_car, modes, slope_pct, link_id, id)

   edgesCombined <- bind_rows(links,ptNetworkMATSim) %>%
     st_sf()

diff --git a/functions/writeOutputs.R 
b/functions/writeOutputs.R index 60165cd..c8fd4ff 100644 --- a/functions/writeOutputs.R +++ b/functions/writeOutputs.R @@ -207,7 +207,12 @@ exportXML <- function(networkFinal, outputDir){ links <- fncols(links, c("id","osm_id", "highway", "cycleway","slope", "bicycleInfrastructureSpeedFactor")) links <- links %>% - mutate(id = ifelse(is.na(id),row_number(),id)) %>% + # mutate(id = ifelse(is.na(id),row_number(),id)) %>% + mutate(id = case_when( + is.na(id) & "link_id" %in% names(links) ~ as.character(link_id), + is.na(id) ~ as.character(row_number()), + TRUE ~ as.character(id)) + ) %>% mutate(type = replace(highway, is.na(highway), "NotSpecified")) %>% mutate(surface = ifelse(is.na(surface),"asphalt",surface)) %>% mutate(cycleway = replace(cycleway, is.na(cycleway),"No")) %>% From f4cdd97b19a8035f3a4d45630770820730bee2be Mon Sep 17 00:00:00 2001 From: StevePem Date: Thu, 29 Feb 2024 16:07:47 +1100 Subject: [PATCH 093/103] update file locations --- functions/getOsmExtract.R | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/functions/getOsmExtract.R b/functions/getOsmExtract.R index e13c9d8..4bdbe89 100644 --- a/functions/getOsmExtract.R +++ b/functions/getOsmExtract.R @@ -6,10 +6,10 @@ getOsmExtract <- function(region, osmGpkg, retainDownload) { - # region = "../data/greater_bendigo.sqlite" + # region = "./data/greater_bendigo.sqlite" # outputCrs = 7899 # regionBufferDist = 10000 # 10km - # osmGpkg = "../output/bendigo_osm.gpkg" + # osmGpkg = "./output/temp_bendigo_osm.gpkg" # load region and buffer by selected distance (eg 10km) region <- st_read(region) From 40cb9dc06e06758995e35b1c3ed262b280b6f05d Mon Sep 17 00:00:00 2001 From: StevePem Date: Thu, 29 Feb 2024 16:08:13 +1100 Subject: [PATCH 094/103] increase timeout to allow for slow download or large OSM extract --- functions/getOsmExtract.R | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/functions/getOsmExtract.R b/functions/getOsmExtract.R index 4bdbe89..7a6cc36 100644 --- a/functions/getOsmExtract.R +++ b/functions/getOsmExtract.R @@ -17,7 +17,7 @@ getOsmExtract <- function(region, # increase timeout to allow time for large Australia extract to download default.timeout <- getOption("timeout") - options(timeout = 3600) + options(timeout = 7200) # download the full extract (whole of Australia; quite slow) download.url <- oe_match(region, crs = outputCrs)$url From c9f688664eb2c8566813244bf50a4e6c7f2d45f1 Mon Sep 17 00:00:00 2001 From: StevePem Date: Thu, 29 Feb 2024 16:09:06 +1100 Subject: [PATCH 095/103] crop OSM extract to buffered region (rather than merely its bounding box) and improve invalid OSM feature detection --- functions/getOsmExtract.R | 46 ++++++++++++++++++++++++++++++++++----- 1 file changed, 40 insertions(+), 6 deletions(-) diff --git a/functions/getOsmExtract.R b/functions/getOsmExtract.R index 7a6cc36..af0e0bc 100644 --- a/functions/getOsmExtract.R +++ b/functions/getOsmExtract.R @@ -13,7 +13,8 @@ getOsmExtract <- function(region, # load region and buffer by selected distance (eg 10km) region <- st_read(region) - region.buffer <- st_buffer(region, regionBufferDist) + region.buffer <- st_buffer(region, regionBufferDist) %>% + st_snap_to_grid(1) # increase timeout to allow time for large Australia extract to download default.timeout <- getOption("timeout") @@ -24,19 +25,52 @@ getOsmExtract <- function(region, echo(paste("Downloading OSM extract from", download.url, "\n")) full.extract <- oe_download(download.url, download_directory = ".") - # convert to gpkg, including all layers + # 
+  # convert to gpkg, including all layers ('boundary' will clip to bounding box)
   echo(paste("Converting downloaded OSM extract to .gpkg for selected region\n"))
   region.gpkg <- oe_vectortranslate(full.extract,
                                     layer = st_layers(full.extract)$name,
                                     vectortranslate_options = c("-t_srs", paste0("EPSG:", outputCrs)),
-                                    boundary = region.buffer)
-
-  # save to permanent location
+                                    boundary = region.buffer,
+                                    boundary_type = "spat") # 'spat' should intersect rather than clip, but still seems to clip
+
+  # intersect with region buffer, eliminate problem features, and save to permanent location
   for (i in 1:length(st_layers(region.gpkg)$name)) {
     current.layer.name <- st_layers(region.gpkg)$name[i]
-    st_write(st_read(region.gpkg, layer = current.layer.name),
+    current.layer <- st_read(region.gpkg, layer = current.layer.name) %>%
+      filter(st_is_valid(geometry))
+
+    # some features have validity problems that cause intersection issues
+    # (and may not be detected by st_is_valid) - find and remove them
+    problem.features <- c()
+
+    # create current.layer.intersected if possible, or else identify problem features
+    tryCatch({
+      current.layer.intersected <- current.layer %>%
+        st_filter(region.buffer, .predicate = st_intersects)
+    }, error = function(e) {
+      # if error, loop through the layer feature by feature and identify the problems
+      message(paste("Problem features detected in OSM layer", current.layer.name,
+                    "; removing problems (may take a while)"))
+      for (j in 1:nrow(current.layer)) {
+        tryCatch({
+          current.feature.intersected <- current.layer[j,] %>%
+            st_filter(region.buffer, .predicate = st_intersects)
+        }, error = function(e) {
+          problem.features <<- c(problem.features, j) # <<- modifies variable in parent environment
+        })
+      }
+    })
+
+    # if problem features found, remove from current layer and create current.layer.intersected
+    if (length(problem.features) > 0) {
+      current.layer <- current.layer[-problem.features,]
+      current.layer.intersected <- current.layer %>%
+        st_filter(region.buffer, .predicate = st_intersects)
+    }
+
+    st_write(current.layer.intersected,
              osmGpkg,
              layer = current.layer.name,
              delete_layer = TRUE)
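[Editor's note, not part of the patch series: the feature-by-feature scan in PATCH 095 is a sound last resort, but sf (>= 1.0) also wraps GEOS's MakeValid, and repairing geometries up front usually removes most problem features before the tryCatch() fallback is ever needed. A minimal sketch, assuming the layers carry a 'geometry' column as above; clean_layer is a hypothetical helper, not a function in this codebase:

  library(sf)
  library(dplyr)

  clean_layer <- function(layer, boundary) {
    layer %>%
      st_make_valid() %>%                  # repair invalid geometries where GEOS can
      filter(!st_is_empty(geometry)) %>%   # drop features that collapse to empty on repair
      st_filter(boundary, .predicate = st_intersects)
  }

st_make_valid() cannot repair every geometry, so the per-feature scan above remains worth keeping as a fallback.]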
From 4fd90992b19a8035f3a4d45630770820730bee2be Mon Sep 17 00:00:00 2001
From: StevePem
Date: Thu, 29 Feb 2024 16:10:09 +1100
Subject: [PATCH 096/103] ensure only destinations in buffered region are
 returned

---
 functions/addDestinations.R | 13 +++++++++----
 functions/getPTStops.R      |  6 ++----
 2 files changed, 11 insertions(+), 8 deletions(-)

diff --git a/functions/addDestinations.R b/functions/addDestinations.R
index 79f4a93..022c0c0 100644
--- a/functions/addDestinations.R
+++ b/functions/addDestinations.R
@@ -43,6 +43,10 @@ addDestinations <- function(nodes_current,
   # read in the layers
   points <- oe_read(osmGpkg, query = paste(extra.tag.string, "FROM points"), quiet = TRUE)
   polygons <- oe_read(osmGpkg, query = paste(extra.tag.string, "FROM multipolygons"), quiet = TRUE)
+
+  # read in and buffer the region
+  study.area <- st_buffer(st_read(region), regionBufferDist) %>%
+    st_snap_to_grid(1)

   # function to extract specific destination types from point or polygon layers ----
   # ----------------------------------#
@@ -86,13 +90,13 @@ addDestinations <- function(nodes_current,
     bind_rows(destination.layer(points),
               # add PT stops (from GTFS feed) to point table
-              getPTStops(city, gtfs_feed, outputCrs, region, regionBufferDist) %>%
+              getPTStops(city, gtfs_feed, outputCrs, study.area) %>%
                 mutate(dest_type = "pt_stop")) %>%
     mutate(dest_id = row_number(),
            area_m2 = 0,
            centroid_x = st_coordinates(.)[, 1],
-           centroid_y = st_coordinates(.)[, 2])
+           centroid_y = st_coordinates(.)[, 2]) %>%
+    st_filter(study.area, .predicate = st_intersects)

   echo("Destination polygon features\n")
   destination.poly <-
     destination.layer(polygons) %>%
     mutate(dest_id = max(destination.pt$dest_id) + row_number(),
            area_m2 = as.numeric(st_area(.)),
            centroid_x = st_coordinates(st_centroid(.))[, 1],
-           centroid_y = st_coordinates(st_centroid(.))[, 2])
+           centroid_y = st_coordinates(st_centroid(.))[, 2]) %>%
+    st_filter(study.area, .predicate = st_intersects)

   # # check numbers of each destination type

diff --git a/functions/getPTStops.R b/functions/getPTStops.R
index 9a57a32..a624f07 100644
--- a/functions/getPTStops.R
+++ b/functions/getPTStops.R
@@ -2,12 +2,11 @@
 # requires tidytransit (loaded in NetworkGenerator.R)

-getPTStops <- function(city, gtfs_feed, outputCrs, region, regionBufferDist) {
+getPTStops <- function(city, gtfs_feed, outputCrs, study.area) {
   # city = "Melbourne"
   # gtfs_feed = "../data/processed/gtfs.zip"
   # outputCrs = 7899
-  # region = "../data/processed/greater_melbourne.sqlite"
-  # regionBufferDist = 10000
+  # study.area = st_buffer(st_read("./data/processed/greater_melbourne.sqlite"), 10000)

   echo("Reading in GTFS data to find public transport stop locations\n")

@@ -16,7 +15,6 @@ getPTStops <- function(city, gtfs_feed, outputCrs, region, regionBufferDist) {
     gtfs_as_sf(., crs = 4326)

   # extract stops with their locations, filtered to study area
-  study.area <- st_buffer(st_read(region), regionBufferDist)
   stops <- gtfs$stops %>%
     st_transform(outputCrs) %>%
     st_set_geometry("geom") %>%

From 4e15760410a6d9776e8568e31f1c6278443e4e23 Mon Sep 17 00:00:00 2001
From: StevePem
Date: Thu, 29 Feb 2024 16:12:25 +1100
Subject: [PATCH 097/103] remove unneeded comment

---
 functions/gtfs2PtNetwork.R | 1 -
 1 file changed, 1 deletion(-)

diff --git a/functions/gtfs2PtNetwork.R b/functions/gtfs2PtNetwork.R
index bf5cda4..4d465ba 100644
--- a/functions/gtfs2PtNetwork.R
+++ b/functions/gtfs2PtNetwork.R
@@ -350,7 +350,6 @@ exportGtfsSchedule <- function(links,
     saveRDS(nodePairRoutes, file=paste0(outputLocation,"nodePairRoutes.rds"))
   }

-  ### Done to here for both Melbourne and Bendigo
   # the public transport network
   ptNetwork <- stopTimes %>%
     dplyr::select(trip_id,arrival_time,departure_time,from_id=stop_id,from_x=x,from_y=y, trip_id_orig, service_type) %>%

From fc37cbe473e5b4b2b77ab6382b75c15828f516b1 Mon Sep 17 00:00:00 2001
From: StevePem
Date: Thu, 29 Feb 2024 16:13:14 +1100
Subject: [PATCH 098/103] remove or comment out saving intermediate processing
 outputs

---
 functions/gtfs2PtNetwork.R | 62 ++++++++++++++++++--------------------
 1 file changed, 30 insertions(+), 32 deletions(-)

diff --git a/functions/gtfs2PtNetwork.R b/functions/gtfs2PtNetwork.R
index 4d465ba..1f96b16 100644
--- a/functions/gtfs2PtNetwork.R
+++ b/functions/gtfs2PtNetwork.R
@@ -27,28 +27,24 @@ addGtfsLinks <- function(outputLocation,
     filter(id %in% validRoadIds) %>%
     st_set_crs(outputCrs)

-  # process the GTFS feed and export relevant tables into a folder
-  processGtfs(outputLocation,
-              nodes,
-              links,
-              networkNodes = validRoadNodes,
-              gtfs_feed,
-              analysis_date,
-              studyRegion,
-              outputCrs,
-              onroadBus)
-
-  # read the outputs
-  stops <- st_read(paste0(outputLocation,"stops.sqlite"),quiet=T)
-  stopTimes <- readRDS(paste0(outputLocation,"stopTimes.rds"))
-  trips <- readRDS(paste0(outputLocation,"trips.rds"))
-  routes <- readRDS(paste0(outputLocation,"routes.rds"))
-  stopTable <- readRDS(paste0(outputLocation, "stopTable.rds"))
-  if (file.exists(paste0(outputLocation, "shape_links.rds"))) {
-    shape.links <- readRDS(paste0(outputLocation, "shape_links.rds"))
-  } else {
-    shape.links <- NA
-  }
+  # process the GTFS feed
+  processedGtfs <- processGtfs(outputLocation,
+                               nodes,
+                               links,
+                               networkNodes = validRoadNodes,
+                               gtfs_feed,
+                               analysis_date,
+                               studyRegion,
+                               outputCrs,
+                               onroadBus)
+
+  # unpack the outputs
+  stops <- processedGtfs[[1]]
+  stopTimes <- processedGtfs[[2]]
+  trips <- processedGtfs[[3]]
+  routes <- processedGtfs[[4]]
+  stopTable <- processedGtfs[[5]]
+  shape.links <- processedGtfs[[6]]

   # We run into trouble if the geometry column is 'geom' instead of 'GEOMETRY'
   stops <- stops %>% st_set_geometry("geom")
@@ -187,8 +183,8 @@ processGtfs <- function(outputLocation = "./output/generated_network/gtfs/",
   shape.nodes <- shape.subnetwork[[1]]
   shape.links <- shape.subnetwork[[2]]

-  # write shape.links to file for use when finding routes
-  saveRDS(shape.links, file=paste0(outputLocation, "shape_links.rds"))
+  # # write shape.links to file for use when finding routes
+  # saveRDS(shape.links, file=paste0(outputLocation, "shape_links.rds"))

   # only shape.nodes are used for snapping bus stops
   networkNodesBus <- networkNodes %>%
@@ -198,10 +194,11 @@ processGtfs <- function(outputLocation = "./output/generated_network/gtfs/",
     if (onroadBus & !"shapes" %in% names(gtfs)) {
       message("No shapes file present in GTFS feed, so unable to convert shapes to routes; will make pseudo links for bus instead")
-    }
+    }

     # all nodes can be used for snapping bus stops
     networkNodesBus <- networkNodes
+    shape.links <- NA
   }

   # divide into bus and non-bus (but, if onroadBus = F, they will be processed the same way,
@@ -282,12 +279,13 @@ processGtfs <- function(outputLocation = "./output/generated_network/gtfs/",
     dplyr::select(-stop_id) %>%
     rename(stop_id=id)

-  # writing the exports to file
-  st_write(validStopsSnappedFinal,paste0(outputLocation,"stops.sqlite"),delete_layer=T)
-  saveRDS(validStopTimesSnappedFinal, file=paste0(outputLocation,"stopTimes.rds"))
-  saveRDS(validTripsSnapped, file=paste0(outputLocation,"trips.rds"))
-  saveRDS(validRoutesSnapped, file=paste0(outputLocation,"routes.rds"))
-  saveRDS(stopTable, file=paste0(outputLocation, "stopTable.rds"))
+  # return the exports as a list
+  return(list(validStopsSnappedFinal,
+              validStopTimesSnappedFinal,
+              validTripsSnapped,
+              validRoutesSnapped,
+              stopTable,
+              shape.links))
 }

From ccf6876e52f701a6d74a24df58e47cb5ef92c881 Mon Sep 17 00:00:00 2001
From: StevePem
Date: Thu, 29 Feb 2024 16:13:53 +1100
Subject: [PATCH 099/103] update comment re DEM file projection

---
 NetworkGenerator.R | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)

diff --git a/NetworkGenerator.R b/NetworkGenerator.R
index 71a37ed..65627b6 100644
--- a/NetworkGenerator.R
+++ b/NetworkGenerator.R
@@ -19,8 +19,7 @@ makeNetwork<-function(city, outputSubdirectory = "generated_network"){
   # • cropAreaPoly: if 'crop2TestArea=T' cropArea location from
   #   https://github.com/JamesChevalier/cities/tree/master/australia/victoria
   #   (only supported for Victoria at this stage)
-  # • demFile: if 'addElevation=T', digital elevation model raster file (must be
-  #   in same coordinate system as network)
+  # • demFile: if 'addElevation=T', digital elevation model raster file
   # • ndviFile: if 'addNDVI=T', raster file with NDVI values
   # • gtfs_feed: if 'addGtfs=T' or 'addDestinationLayer=T', zip file containing
   #   GTFS data (and, if 'addGtfs=T', also set analysis date in GTFS section)
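[Editor's note, not part of the patch series: the in-memory hand-off introduced in PATCH 098 returns the six tables positionally, so the unpacking in addGtfsLinks() must stay in step with the return(list(...)) order. Naming the elements would make the hand-off self-documenting and order-independent. A sketch only, reusing the object names from the patch:

  processedGtfs <- list(stops      = validStopsSnappedFinal,
                        stopTimes  = validStopTimesSnappedFinal,
                        trips      = validTripsSnapped,
                        routes     = validRoutesSnapped,
                        stopTable  = stopTable,
                        shapeLinks = shape.links)

  stops <- processedGtfs$stops   # and so on, independent of element order

This is a style suggestion only; the positional version in the patch behaves identically.]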
From 269c8877366cd11d1d5c03834d1bbfddc736b872 Mon Sep 17 00:00:00 2001
From: StevePem
Date: Thu, 29 Feb 2024 16:29:04 +1100
Subject: [PATCH 100/103] update location of pt outputs from 'gtfs' to 'pt'

---
 NetworkGenerator.R         | 2 +-
 functions/gtfs2PtNetwork.R | 6 +++---
 2 files changed, 4 insertions(+), 4 deletions(-)

diff --git a/NetworkGenerator.R b/NetworkGenerator.R
index 65627b6..2afb623 100644
--- a/NetworkGenerator.R
+++ b/NetworkGenerator.R
@@ -344,7 +344,7 @@ makeNetwork<-function(city, outputSubdirectory = "generated_network"){
     studyRegion = NA
   }
   system.time(
-    networkOneway[[2]] <- addGtfsLinks(outputLocation = paste0(outputDir,"/gtfs/"),
+    networkOneway[[2]] <- addGtfsLinks(outputLocation = paste0(outputDir,"/pt/"),
                                        nodes = networkOneway[[1]],
                                        links = networkOneway[[2]],
                                        gtfs_feed,

diff --git a/functions/gtfs2PtNetwork.R b/functions/gtfs2PtNetwork.R
index 1f96b16..d4df59c 100644
--- a/functions/gtfs2PtNetwork.R
+++ b/functions/gtfs2PtNetwork.R
@@ -8,7 +8,7 @@ addGtfsLinks <- function(outputLocation,
                          onroadBus,
                          city){

-  # outputLocation = "./output/generated_network/gtfs/"
+  # outputLocation = "./output/generated_network/pt/"
   # nodes = networkOneway[[1]]
   # links = networkOneway[[2]]
   # gtfs_feed = "./data/gtfs.zip"
@@ -67,7 +67,7 @@ addGtfsLinks <- function(outputLocation,
   return(edgesCombined)
 }

-processGtfs <- function(outputLocation = "./output/generated_network/gtfs/",
+processGtfs <- function(outputLocation = "./output/generated_network/pt/",
                         nodes,
                         links,
                         networkNodes,
@@ -77,7 +77,7 @@ processGtfs <- function(outputLocation = "./output/generated_network/gtfs/",
                         outputCrs,
                         onroadBus){

-  # outputLocation ="./output/generated_network/gtfs/"
+  # outputLocation ="./output/generated_network/pt/"
   # networkNodes = validRoadNodes
   # gtfs_feed = "./data/gtfs.zip"
   # analysis_date = as.Date("2023-11-15","%Y-%m-%d")

From 8554dac0351305d182e6ebf2b5710ef0375b384f Mon Sep 17 00:00:00 2001
From: StevePem
Date: Thu, 29 Feb 2024 17:20:50 +1100
Subject: [PATCH 101/103] create pt output directory in gtfs2PtNetwork.R rather
 than NetworkGenerator.R, if required

---
 NetworkGenerator.R         | 2 +-
 functions/gtfs2PtNetwork.R | 2 ++
 2 files changed, 3 insertions(+), 1 deletion(-)

diff --git a/NetworkGenerator.R b/NetworkGenerator.R
index 2afb623..b976fac 100644
--- a/NetworkGenerator.R
+++ b/NetworkGenerator.R
@@ -134,7 +134,7 @@ makeNetwork<-function(city, outputSubdirectory = "generated_network"){
   if(outputSubdirectory != "" & dir.exists(outputDir)) dir_delete(outputDir)
   dir_create(paste0('./',outputDir))
   sink(paste0('./',outputDir,'/makeMatsimNetwork.log'), append=FALSE, split=TRUE)
-  if (addGtfs) dir_create(paste0(outputDir,"/gtfs"))
+  # if (addGtfs) dir_create(paste0(outputDir,"/gtfs"))

   # Functions --------------------------------------------------------------

diff --git a/functions/gtfs2PtNetwork.R b/functions/gtfs2PtNetwork.R
index d4df59c..492b44c 100644
--- a/functions/gtfs2PtNetwork.R
+++ b/functions/gtfs2PtNetwork.R
@@ -17,6 +17,8 @@ addGtfsLinks <- function(outputLocation,
   # outputCrs = 7899
   # onroadBus = T

+  dir_create(outputLocation)
+
   validRoadEdges <- links %>%
     st_drop_geometry() %>%
     filter(id %in% validRoadIds) %>%
     st_set_crs(outputCrs)
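[Editor's note, not part of the patch series: PATCH 101 relies on fs::dir_create(), already used in NetworkGenerator.R, being recursive and succeeding silently when the directory exists, so the call inside addGtfsLinks() is safe whether or not the caller created the directory first. A minimal sketch; the pt_dir value is an example path only:

  library(fs)

  pt_dir <- "./output/generated_network/pt/"
  dir_create(pt_dir)   # creates intermediate directories as needed
  dir_create(pt_dir)   # idempotent: a second call is a no-op, not an error

Unlike base R's dir.create(), fs::dir_create() does not warn when the directory already exists.]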
From b531fd3b9164add88021a2e0a381c5df96f8ee4f Mon Sep 17 00:00:00 2001
From: StevePem
Date: Tue, 19 Mar 2024 17:26:33 +1100
Subject: [PATCH 102/103] more progress reporting when discarding problem
 features

---
 functions/getOsmExtract.R | 1 +
 1 file changed, 1 insertion(+)

diff --git a/functions/getOsmExtract.R b/functions/getOsmExtract.R
index af0e0bc..d95160a 100644
--- a/functions/getOsmExtract.R
+++ b/functions/getOsmExtract.R
@@ -54,6 +54,7 @@ getOsmExtract <- function(region,
       message(paste("Problem features detected in OSM layer", current.layer.name,
                     "; removing problems (may take a while)"))
       for (j in 1:nrow(current.layer)) {
+        if (j %% 500 == 0) print(paste("Checked", j, "of", nrow(current.layer), "features"))
         tryCatch({
           current.feature.intersected <- current.layer[j,] %>%
             st_filter(region.buffer, .predicate = st_intersects)

From 74f7352c7d0a79c359e8443d315d52f62be5fff7 Mon Sep 17 00:00:00 2001
From: StevePem
Date: Wed, 20 Mar 2024 10:09:36 +1100
Subject: [PATCH 103/103] support for region or unconfigured sqlite in
 different CRS

---
 NetworkGenerator.R          | 13 +++++++++++--
 functions/addDestinations.R |  6 +++++-
 functions/getOsmExtract.R   |  9 ++++++---
 3 files changed, 22 insertions(+), 6 deletions(-)

diff --git a/NetworkGenerator.R b/NetworkGenerator.R
index b976fac..6eed931 100644
--- a/NetworkGenerator.R
+++ b/NetworkGenerator.R
@@ -188,6 +188,12 @@ makeNetwork<-function(city, outputSubdirectory = "generated_network"){
     networkUnconfigured <-
       list(st_read(unconfiguredSqlite, layer = "nodes") %>% st_set_geometry("geom"),
            st_read(unconfiguredSqlite, layer = "edges") %>% st_set_geometry("geom"))
+    if (st_crs(networkUnconfigured[[1]])$epsg != outputCrs) {
+      networkUnconfigured[[1]] <- st_transform(networkUnconfigured[[1]], outputCrs)
+    }
+    if (st_crs(networkUnconfigured[[2]])$epsg != outputCrs) {
+      networkUnconfigured[[2]] <- st_transform(networkUnconfigured[[2]], outputCrs)
+    }

     osm_metadata <- st_read(unconfiguredSqlite, layer = "osm_metadata") %>%
       filter(osm_id %in% networkUnconfigured[[2]]$osm_id)
@@ -336,8 +342,11 @@ makeNetwork<-function(city, outputSubdirectory = "generated_network"){
     if (file.exists(region)) {
       # read in the study region boundary
       echo("Using Region file for GTFS processing\n")
-      studyRegion <- st_read(region, quiet=T) %>%
-        st_buffer(regionBufferDist) %>%
+      region.poly <- st_read(region)
+      if (st_crs(region.poly)$epsg != outputCrs) {
+        region.poly <- st_transform(region.poly, outputCrs)
+      }
+      studyRegion <- st_buffer(region.poly, regionBufferDist) %>%
         st_snap_to_grid(1)
     } else {
       echo("Region file was not found, skipping\n")

diff --git a/functions/addDestinations.R b/functions/addDestinations.R
index 022c0c0..765c6e1 100644
--- a/functions/addDestinations.R
+++ b/functions/addDestinations.R
@@ -45,7 +45,11 @@ addDestinations <- function(nodes_current,
   polygons <- oe_read(osmGpkg, query = paste(extra.tag.string, "FROM multipolygons"), quiet = TRUE)

   # read in and buffer the region
-  study.area <- st_buffer(st_read(region), regionBufferDist) %>%
+  region.poly <- st_read(region)
+  if (st_crs(region.poly)$epsg != outputCrs) {
+    region.poly <- st_transform(region.poly, outputCrs)
+  }
+  study.area <- st_buffer(region.poly, regionBufferDist) %>%
     st_snap_to_grid(1)

diff --git a/functions/getOsmExtract.R b/functions/getOsmExtract.R
index d95160a..ddbac0d 100644
--- a/functions/getOsmExtract.R
+++ b/functions/getOsmExtract.R
@@ -12,8 +12,11 @@ getOsmExtract <- function(region,
   # osmGpkg = "./output/temp_bendigo_osm.gpkg"

   # load region and buffer by selected distance (eg 10km)
-  region <- st_read(region)
-  region.buffer <- st_buffer(region, regionBufferDist) %>%
+  region.poly <- st_read(region)
+  if (st_crs(region.poly)$epsg != outputCrs) {
+    region.poly <- st_transform(region.poly, outputCrs)
+  }
+  region.buffer <- st_buffer(region.poly, regionBufferDist) %>%
     st_snap_to_grid(1)

   # increase timeout to allow time for large Australia extract to download
   default.timeout <- getOption("timeout")
   options(timeout = 7200)

   # download the full extract (whole of Australia; quite slow)
-  download.url <- oe_match(region, crs = outputCrs)$url
+  download.url <- oe_match(region.buffer, crs = outputCrs)$url
   echo(paste("Downloading OSM extract from", download.url, "\n"))
   full.extract <- oe_download(download.url, download_directory = ".")
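[Editor's note, not part of the patch series: in the writeOutputs.R change at the top of this excerpt, dplyr::case_when() evaluates every right-hand side, so as.character(link_id) will error when links has no link_id column, even though its condition is then FALSE. A plain if () guard avoids that. A sketch only, assuming the same column names as the patch:

  library(dplyr)

  links <- links %>%
    mutate(id = if ("link_id" %in% names(links)) {
      coalesce(as.character(id), as.character(link_id))        # prefer link_id where id is NA
    } else {
      coalesce(as.character(id), as.character(row_number()))   # otherwise fall back to row numbers
    })

This keeps the same precedence as the patch: an existing id always wins.]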
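[Editor's note, not part of the patch series: PATCH 094 stores the old timeout in default.timeout so it can be restored after the download. Wrapping the change in on.exit() restores the option even if the download fails part-way. A minimal sketch; with_long_timeout is a hypothetical helper, not a function in this codebase:

  with_long_timeout <- function(expr, seconds = 7200) {
    old <- options(timeout = seconds)   # options() returns the values it replaces
    on.exit(options(old), add = TRUE)   # restore on normal exit or error
    force(expr)                         # evaluate the wrapped expression under the long timeout
  }

  # usage: full.extract <- with_long_timeout(oe_download(download.url, download_directory = "."))

Because R evaluates arguments lazily, the download does not start until force(expr) runs inside the helper.]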
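[Editor's note, not part of the patch series: PATCH 103 repeats the same transform-if-needed guard in three places, so a small helper would centralise it. Comparing whole crs objects is also slightly more robust than comparing $epsg codes, which are NA for coordinate reference systems without an EPSG identifier. A minimal sketch; align_crs is a hypothetical helper, not a function in this codebase:

  library(sf)

  align_crs <- function(x, outputCrs) {
    if (st_crs(x) != st_crs(outputCrs)) {   # sf supports ==/!= on crs objects
      x <- st_transform(x, outputCrs)
    }
    x
  }

  # usage, mirroring the patch: region.poly <- align_crs(st_read(region), outputCrs)

st_crs() accepts an EPSG code directly, so outputCrs can stay a plain integer as elsewhere in this codebase.]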