Here are some of the scripts created during the internship.
# GTFS clipping script / Inspired by the UK2GTFS package
#### Installation et lancement du package ####
#install.packages("remotes")
#install.packages("leaflet")
#install.packages("sf")
#install.packages("opentripplanner")
#install.packages("tidyverse")
#install.packages("downloader")
#install.packages("checkmate")
#install.packages("zip")
library("leaflet")
library("sf")
library(tidyverse)
library(downloader)
# Création des fonctions
#gtfs_read
gtfs_read <- function(path){
checkmate::assert_file_exists(path)
tmp_folder <- file.path(tempdir(),"gtfsread")
dir.create(tmp_folder)
utils::unzip(path, exdir = tmp_folder)
files <- list.files(tmp_folder, pattern = ".txt")
gtfs <- list()
message_log <- c("Unable to find optional files: ")
if(checkmate::test_file_exists(file.path(tmp_folder,"agency.txt"))){
gtfs$agency <- readr::read_csv(file.path(tmp_folder,"agency.txt"))
} else {
warning("Unable to find required file: agency.txt")
}
if(checkmate::test_file_exists(file.path(tmp_folder,"stops.txt"))){
gtfs$stops <- readr::read_csv(file.path(tmp_folder,"stops.txt"))
} else {
warning("Unable to find required file: stops.txt")
}
if(checkmate::test_file_exists(file.path(tmp_folder,"routes.txt"))){
gtfs$routes <- readr::read_csv(file.path(tmp_folder,"routes.txt"),
col_types = readr::cols(route_id = readr::col_character()))
} else {
warning("Unable to find required file: routes.txt")
}
if(checkmate::test_file_exists(file.path(tmp_folder,"trips.txt"))){
gtfs$trips <- readr::read_csv(file.path(tmp_folder,"trips.txt"),
col_types = readr::cols(trip_id = readr::col_character(),
route_id = readr::col_character()))
} else {
warning("Unable to find required file: trips.txt")
}
if(checkmate::test_file_exists(file.path(tmp_folder,"stop_times.txt"))){
gtfs$stop_times <- readr::read_csv(file.path(tmp_folder,"stop_times.txt"),
col_types = readr::cols(trip_id = readr::col_character(),
departure_time = readr::col_character(),
arrival_time = readr::col_character()))
gtfs$stop_times$arrival_time <- lubridate::hms(gtfs$stop_times$arrival_time)
gtfs$stop_times$departure_time <- lubridate::hms(gtfs$stop_times$departure_time)
} else {
warning("Unable to find required file: stop_times.txt")
}
if(checkmate::test_file_exists(file.path(tmp_folder,"calendar.txt"))){
gtfs$calendar <- readr::read_csv(file.path(tmp_folder,"calendar.txt"),
col_types = readr::cols(start_date = readr::col_date(format = "%Y%m%d"),
end_date = readr::col_date(format = "%Y%m%d")))
} else {
message("Unable to find conditionally required file: calendar.txt")
}
if(checkmate::test_file_exists(file.path(tmp_folder,"calendar_dates.txt"))){
gtfs$calendar_dates <- readr::read_csv(file.path(tmp_folder,"calendar_dates.txt"),
col_types = readr::cols(date = readr::col_date(format = "%Y%m%d")))
} else {
message("Unable to find conditionally required file: calendar_dates.txt")
}
if(checkmate::test_file_exists(file.path(tmp_folder,"fare_attributes.txt"))){
gtfs$fare_attributes <- readr::read_csv(file.path(tmp_folder,"fare_attributes.txt"))
} else {
message_log <- c(message_log, "fare_attributes.txt")
}
if(checkmate::test_file_exists(file.path(tmp_folder,"fare_rules.txt"))){
gtfs$fare_rules <- readr::read_csv(file.path(tmp_folder,"fare_rules.txt"))
} else {
message_log <- c(message_log, "fare_rules.txt")
}
if(checkmate::test_file_exists(file.path(tmp_folder,"shapes.txt"))){
gtfs$shapes <- readr::read_csv(file.path(tmp_folder,"shapes.txt"))
} else {
message_log <- c(message_log, "shapes.txt")
}
if(checkmate::test_file_exists(file.path(tmp_folder,"transfers.txt"))){
gtfs$transfers <- readr::read_csv(file.path(tmp_folder,"transfers.txt"))
} else {
message_log <- c(message_log, "transfers.txt")
}
unlink(tmp_folder, recursive = TRUE)
if(length(message_log) > 1){
message(paste(message_log, collapse = " "))
}
return(gtfs)
}
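# A quick way to check what gtfs_read() actually loaded is to inspect the
# returned object, which is a plain named list of tables (minimal sketch,
# hypothetical path):
# gtfs_test <- gtfs_read("C:/chemin/vers/un_gtfs.zip")
# names(gtfs_test)          # which GTFS tables were found
# sapply(gtfs_test, nrow)   # row count per table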
#gtfs_clip
gtfs_clip <- function(gtfs, bounds) {
if(!sf::st_is_longlat(bounds)){
stop("The CRS of bounds is not EPSG:4326, please reproject with sf::st_transform(bounds, 4326)")
}
if (nrow(bounds) > 1) {
message("Multiple geometries offered, using total area of all geometries")
bounds <- sf::st_combine(bounds)
suppressWarnings(bounds <- sf::st_buffer(bounds, 0))
}
stops <- gtfs$stops
stop_times <- gtfs$stop_times
# bbox <- sf::st_bbox(bounds)
stops_inc <- stops[!is.na(stops$stop_lon), ]
stops_inc$stop_lon <- as.numeric(stops_inc$stop_lon)
stops_inc$stop_lat <- as.numeric(stops_inc$stop_lat)
stops_inc <- sf::st_as_sf(stops_inc, coords = c("stop_lon", "stop_lat"), crs = 4326)
suppressWarnings(stops_inc <- stops_inc[bounds, ])
stops_inc <- unique(stops_inc$stop_id)
gtfs$stops <- gtfs$stops[gtfs$stops$stop_id %in% stops_inc, ]
gtfs$stop_times <- gtfs$stop_times[gtfs$stop_times$stop_id %in% stops_inc, ]
# Check for single stop trips
n_stops <- table(gtfs$stop_times$trip_id)
single_stops <- names(n_stops[n_stops == 1]) # trip_id is read as character, so keep the names as character
gtfs$stop_times <- gtfs$stop_times[!gtfs$stop_times$trip_id %in% single_stops, ]
# Check for any unused stops
gtfs$stops <- gtfs$stops[gtfs$stops$stop_id %in% unique(gtfs$stop_times$stop_id), ]
gtfs$trips <- gtfs$trips[gtfs$trips$trip_id %in% unique(gtfs$stop_times$trip_id), ]
gtfs$routes <- gtfs$routes[gtfs$routes$route_id %in% unique(gtfs$trips$route_id), ]
gtfs$calendar <- gtfs$calendar[gtfs$calendar$service_id %in% unique(gtfs$trips$service_id), ]
gtfs$calendar_dates <- gtfs$calendar_dates[gtfs$calendar_dates$service_id %in% unique(gtfs$trips$service_id), ]
gtfs$agency <- gtfs$agency[gtfs$agency$agency_id %in% unique(gtfs$routes$agency_id), ]
return(gtfs)
}
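# The bounds argument must be an sf object in EPSG:4326. For a quick test, a
# simple rectangular clipping area can be built as follows (sketch with
# illustrative coordinates only):
# bbox_test <- sf::st_as_sfc(sf::st_bbox(c(xmin = 7.0, ymin = 48.5, xmax = 8.0, ymax = 49.5),
#                                        crs = sf::st_crs(4326)))
# bounds_test <- sf::st_sf(geometry = bbox_test)
# gtfs_petit <- gtfs_clip(gtfs, bounds_test)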
#gtfs_clean
gtfs_clean <- function(gtfs) {
# 1 Remove stops with no locations
gtfs$stop_times <- gtfs$stop_times[gtfs$stop_times$stop_id %in% unique(gtfs$stops$stop_id), ]
# 2 Remove stops that are never used
gtfs$stops <- gtfs$stops[gtfs$stops$stop_id %in% unique(gtfs$stop_times$stop_id), ]
return(gtfs)
}
#gtfs_force_valid
gtfs_force_valid <- function(gtfs) {
message("This function removes some invalid records but does not correct them")
# Stops with missing lat/lon
gtfs$stops <- gtfs$stops[!is.na(gtfs$stops$stop_lon) & !is.na(gtfs$stops$stop_lat),]
# Stop Times that are not in stops
gtfs$stop_times <- gtfs$stop_times[gtfs$stop_times$stop_id %in% gtfs$stops$stop_id,]
#Trips that are not in trips
gtfs$stop_times <- gtfs$stop_times[gtfs$stop_times$trip_id %in% gtfs$trips$trip_id,]
return(gtfs)
}
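# Note: gtfs_clean() and gtfs_force_valid() return a modified copy of the GTFS
# list, so their result must be reassigned, e.g. (sketch):
# gtfs <- gtfs_clean(gtfs)
# gtfs <- gtfs_force_valid(gtfs)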
#gtfs_write
gtfs_write <- function(gtfs,
folder = getwd(),
name = "gtfs",
stripComma = TRUE,
stripTab = TRUE,
quote = FALSE) {
if (stripComma) {
for (i in seq_len(length(gtfs))) {
gtfs[[i]] <- stripCommas(gtfs[[i]])
}
}
if (stripTab) {
for (i in seq_len(length(gtfs))) {
gtfs[[i]] <- stripTabs(gtfs[[i]])
}
}
#Format Dates
if(class(gtfs$calendar$start_date) == "Date"){
gtfs$calendar$start_date <- format(gtfs$calendar$start_date, "%Y%m%d")
}
if(class(gtfs$calendar$end_date) == "Date"){
gtfs$calendar$end_date <- format(gtfs$calendar$end_date, "%Y%m%d")
}
if(class(gtfs$calendar_dates$date) == "Date"){
gtfs$calendar_dates$date <- format(gtfs$calendar_dates$date, "%Y%m%d")
}
#Format times
if(class(gtfs$stop_times$arrival_time) == "Period"){
gtfs$stop_times$arrival_time <- period2gtfs(gtfs$stop_times$arrival_time)
}
if(class(gtfs$stop_times$departure_time) == "Period"){
gtfs$stop_times$departure_time <- period2gtfs(gtfs$stop_times$departure_time)
}
dir.create(paste0(folder, "/gtfs_temp"))
utils::write.csv(gtfs$calendar, paste0(folder, "/gtfs_temp/calendar.txt"), row.names = FALSE, quote = quote)
if (nrow(gtfs$calendar_dates) > 0) {
utils::write.csv(gtfs$calendar_dates, paste0(folder, "/gtfs_temp/calendar_dates.txt"), row.names = FALSE, quote = quote)
}
utils::write.csv(gtfs$routes, paste0(folder, "/gtfs_temp/routes.txt"), row.names = FALSE, quote = quote)
utils::write.csv(gtfs$stop_times, paste0(folder, "/gtfs_temp/stop_times.txt"), row.names = FALSE, quote = quote)
utils::write.csv(gtfs$trips, paste0(folder, "/gtfs_temp/trips.txt"), row.names = FALSE, quote = quote)
utils::write.csv(gtfs$stops, paste0(folder, "/gtfs_temp/stops.txt"), row.names = FALSE, quote = quote)
utils::write.csv(gtfs$agency, paste0(folder, "/gtfs_temp/agency.txt"), row.names = FALSE, quote = quote)
if ("transfers" %in% names(gtfs)) {
utils::write.csv(gtfs$transfers, paste0(folder, "/gtfs_temp/transfers.txt"), row.names = FALSE, quote = quote)
}
zip::zipr(paste0(folder, "/", name, ".zip"), list.files(paste0(folder, "/gtfs_temp"), full.names = TRUE), recurse = FALSE)
unlink(paste0(folder, "/gtfs_temp"), recursive = TRUE)
message(paste0(folder, "/", name, ".zip"))
}
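# gtfs_write() relies on three helpers that belong to UK2GTFS and are not
# defined here (the package is not loaded in this script): stripCommas,
# stripTabs and period2gtfs. The minimal stand-ins below are sketches of the
# assumed behaviour, not the package's exact code: they clean separators out
# of character columns and turn lubridate Periods back into "HH:MM:SS" strings.
stripCommas <- function(df) {
  for (col in names(df)) {
    if (is.character(df[[col]])) df[[col]] <- gsub(",", " ", df[[col]], fixed = TRUE)
  }
  df
}
stripTabs <- function(df) {
  for (col in names(df)) {
    if (is.character(df[[col]])) df[[col]] <- gsub("\t", " ", df[[col]], fixed = TRUE)
  }
  df
}
period2gtfs <- function(p) {
  sprintf("%02d:%02d:%02d",
          as.integer(lubridate::hour(p)),
          as.integer(lubridate::minute(p)),
          as.integer(round(lubridate::second(p))))
}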
#### Definition du chemin de la couche de decoupage (EPSG 4326) et du chemin de sortie ####
sourceZone <- "C:/Users/boucherec/Documents/Stage/Travail/Decoupage-GTFS/UK2GTFS/clip"
sortie <- "C:/Users/boucherec/Documents/Stage/OTP/otp-data/transfrontalier/"
#### Import et visualisation de la zone de decoupe ####
Zone <- st_read(dsn = sourceZone, layer = 'zone_25km_frontiere_grand-est_4326')
plot(Zone)
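# gtfs_clip() requires EPSG:4326; if the layer came in another CRS it can be
# reprojected first (sketch, mirroring the check inside gtfs_clip):
# if (!st_is_longlat(Zone)) Zone <- st_transform(Zone, 4326)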
#---------SUISSE------------
#### Telechargement et definition du chemin des donnees ####
sourceGTFS <- "C:/Users/boucherec/Documents/Stage/telechargements/suisse.zip"
download("https://opentransportdata.swiss/dataset/1aff176a-9665-4395-a3b1-03e3032a0373/resource/349438b3-6d65-4124-950d-3ad37609b7bb/download/gtfs_fp2021_2021-06-23_09-10.zip",sourceGTFS, mode = "wb")
#### Visualisation de l'emprise du GTFS d'entree ####
# Lecture du GTFS
gtfs <- gtfs_read(sourceGTFS)
# Creation de la carte ( basee sur les arrets)
# carte_arrets_entree = leaflet() %>%
# # Ajout du fond de plan
# addProviderTiles("CartoDB.Voyager") %>%
# # Ajout des arrets
# addCircleMarkers(lng= ~as.numeric(stop_lon), lat= ~as.numeric(stop_lat), data = gtfs$stops,
# stroke = FALSE, fillOpacity = 0.5, radius =1 )
# Affichage de la carte
# carte_arrets_entree
#### Decoupage et visualisation du GTFS decoupe ####
gtfs2 <- gtfs_clip(gtfs,Zone)
# Creation de la carte basee sur les arrets
carte_arrets_entree = leaflet() %>%
# Ajout du fond de plan
addProviderTiles("CartoDB.Voyager") %>%
# Ajout des arrets
addCircleMarkers(lng= ~as.numeric(stop_lon), lat= ~as.numeric(stop_lat), data = gtfs2$stops,
stroke = FALSE, fillOpacity = 0.5, radius =1 )
# Affichage de la carte
carte_arrets_entree
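# Optional check (sketch): overlay the clipping zone on the same leaflet map
# carte_arrets_entree %>%
#   addPolygons(data = Zone, fill = FALSE, weight = 2, color = "red")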
#### Suppression des transfers avec stops supprimes ####
transfers <- gtfs2$transfers
stops <- gtfs2$stops
# Preparation des stops pour la jointure
stops2 <- stops %>% rename(from_stop_id = stop_id)
stops2$from_stop_id <- as.character(stops2$from_stop_id)
stops3 <- stops %>% rename(to_stop_id = stop_id)
stops3$to_stop_id <- as.character(stops3$to_stop_id)
# Preparation des transfers pour la jointure
transfers$from_stop_id <- as.character(transfers$from_stop_id)
transfers$to_stop_id <- as.character(transfers$to_stop_id)
# Jointure et suppression des champs incomplets
transfers2 <- left_join(stops3,transfers) %>% select(from_stop_id, to_stop_id, transfer_type, min_transfer_time) %>% unique()
transfers2 <- transfers2 %>% filter( !is.na(from_stop_id) & !is.na(to_stop_id) )
# Remplacement dans le GTFS par couche corrigee
gtfs2$transfers <- transfers2
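# The same cleanup can be written as a direct filter on both endpoints
# (sketch using the objects defined above; unlike the join, it also drops
# transfers whose origin stop was removed by the clip):
# transfers_alt <- transfers %>%
#   filter(from_stop_id %in% as.character(stops$stop_id),
#          to_stop_id %in% as.character(stops$stop_id))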
#### Suppression des trips avec moins de deux arrets ####
trips <- gtfs2$trips
stop_times <- gtfs2$stop_times
# Jointure entre trips et stop_times
trips2 <- right_join(trips,stop_times) %>% select(route_id, service_id, trip_id, trip_headsign, trip_short_name, direction_id, stop_id)
# Compte du nombre d'arret par trips et selection des trips avec plus de 1 arrets
compte <- trips2 %>% group_by(trip_id) %>% summarise(nb = n())
compte <- compte %>% filter(nb>1)
# Jointures attributaires entre les deux tables et la table compte
trips2 <- right_join(trips,compte) %>% select(route_id, service_id, trip_id, trip_headsign, trip_short_name, direction_id)
stop_times2 <- right_join(stop_times,compte) %>% select(trip_id, arrival_time, departure_time, stop_id, stop_sequence, pickup_type, drop_off_type)
# Remplacement dans le GTFS par couches corrigees
gtfs2$trips <- trips2
gtfs2$stop_times <- stop_times2
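# Compact dplyr equivalent of this "fewer than two stops" filter (sketch,
# shown once here; the same applies to the country blocks below):
# trips_valides <- stop_times %>% count(trip_id) %>% filter(n > 1) %>% pull(trip_id)
# gtfs2$stop_times <- stop_times %>% filter(trip_id %in% trips_valides)
# gtfs2$trips <- trips %>% filter(trip_id %in% trips_valides)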
#### Nettoyage et ecriture du GTFS ####
gtfs2 <- gtfs_clean(gtfs2)
gtfs2 <- gtfs_force_valid(gtfs2)
# Ecriture du nouveau GTFS
gtfs_write(gtfs2, sortie, name ="suisse_gtfs")
#### Suppression des fichiers crees ####
rm(carte_arrets_entree)
rm(compte)
rm(gtfs)
rm(gtfs2)
rm(stop_times)
rm(stop_times2)
rm(stops)
rm(stops2)
rm(stops3)
rm(transfers)
rm(transfers2)
rm(trips)
rm(trips2)
#---------ALLEMAGNE (transit local)------------
#### Telechargement et definition du chemin des donnees ####
source2GTFS <- "C:/Users/boucherec/Documents/Stage/telechargements/allemagne_tl.zip"
download("https://download.gtfs.de/germany/nv_free/latest.zip",source2GTFS, mode = "wb")
#### Lecture du GTFS ####
gtfs <- gtfs_read(source2GTFS)
#### Decoupage et visualisation du GTFS decoupe ####
gtfs2 <- gtfs_clip(gtfs,Zone)
# Creation de la carte basee sur les arrets
carte_arrets_entree = leaflet() %>%
# Ajout du fond de plan
addProviderTiles("CartoDB.Voyager") %>%
# Ajout des arrets
addCircleMarkers(lng= ~as.numeric(stop_lon), lat= ~as.numeric(stop_lat), data = gtfs2$stops,
stroke = FALSE, fillOpacity = 0.5, radius =1 )
# Affichage de la carte
carte_arrets_entree
#### Suppression des trips avec moins de deux arrets ####
trips <- gtfs2$trips
stop_times <- gtfs2$stop_times
# Jointure entre trips et stop_times
trips2 <- right_join(trips,stop_times) %>% select(route_id, service_id, trip_id, direction_id, stop_id)
# Compte du nombre d'arret par trips et selection des trips avec plus de 1 arrets
compte <- trips2 %>% group_by(trip_id) %>% summarise(nb = n())
compte <- compte %>% filter(nb>1)
# Jointures attributaires entre les deux tables et la table compte
trips2 <- right_join(trips,compte) %>% select(route_id, service_id, trip_id, direction_id)
stop_times2 <- right_join(stop_times,compte) %>% select(trip_id, arrival_time, departure_time, stop_id, stop_sequence, pickup_type, drop_off_type)
# Remplacement dans le GTFS par couches corrigees
gtfs2$trips <- trips2
gtfs2$stop_times <- stop_times2
#### Nettoyage et ecriture du GTFS ####
gtfs2 <- gtfs_clean(gtfs2)
gtfs2 <- gtfs_force_valid(gtfs2)
# Ecriture du nouveau GTFS
gtfs_write(gtfs2, sortie, name = "allemagne_local_transit")
#### Suppression des fichiers crees ####
rm(carte_arrets_entree)
rm(compte)
rm(gtfs)
rm(gtfs2)
rm(stop_times)
rm(stop_times2)
rm(trips)
rm(trips2)
#---------ALLEMAGNE (long distance rail)------------
#### Telechargement et definition du chemin des donnees ####
source3GTFS <- "C:/Users/boucherec/Documents/Stage/telechargements/allemagne_ldr.zip"
download("https://download.gtfs.de/germany/fv_free/latest.zip",source3GTFS, mode = "wb")
#### Lecture du GTFS ####
gtfs <- gtfs_read(source3GTFS)
#### Decoupage et visualisation du GTFS decoupe ####
gtfs2 <- gtfs_clip(gtfs,Zone)
# Creation de la carte basee sur les arrets
carte_arrets_entree = leaflet() %>%
# Ajout du fond de plan
addProviderTiles("CartoDB.Voyager") %>%
# Ajout des arrets
addCircleMarkers(lng= ~as.numeric(stop_lon), lat= ~as.numeric(stop_lat), data = gtfs2$stops,
stroke = FALSE, fillOpacity = 0.5, radius =1 )
# Affichage de la carte
carte_arrets_entree
#### Suppression des trips avec moins de deux arrets ####
trips <- gtfs2$trips
stop_times <- gtfs2$stop_times
# Jointure entre trips et stop_times
trips2 <- right_join(trips,stop_times) %>% select(route_id, service_id, trip_id, direction_id, stop_id)
# Compte du nombre d'arret par trips et selection des trips avec plus de 1 arrets
compte <- trips2 %>% group_by(trip_id) %>% summarise(nb = n())
compte <- compte %>% filter(nb>1)
# Jointures attributaires entre les deux tables et la table compte
trips2 <- right_join(trips,compte) %>% select(route_id, service_id, trip_id, direction_id)
stop_times2 <- right_join(stop_times,compte) %>% select(trip_id, arrival_time, departure_time, stop_id, stop_sequence, pickup_type, drop_off_type)
# Remplacement dans le GTFS par couches corrigees
gtfs2$trips <- trips2
gtfs2$stop_times <- stop_times2
#### Nettoyage et ecriture du GTFS ####
gtfs2 <- gtfs_clean(gtfs2)
gtfs2 <- gtfs_force_valid(gtfs2)
# Ecriture du nouveau GTFS
gtfs_write(gtfs2, sortie, name = "allemagne_long_distance_rail")
#### Suppression des fichiers crees ####
rm(carte_arrets_entree)
rm(compte)
rm(gtfs)
rm(gtfs2)
rm(stop_times)
rm(stop_times2)
rm(trips)
rm(trips2)
#---------ALLEMAGNE (regional rail)------------
#### Telechargement et definition du chemin des donnees ####
source4GTFS <- "C:/Users/boucherec/Documents/Stage/telechargements/allemagne_rr.zip"
download("https://download.gtfs.de/germany/rv_free/latest.zip",source4GTFS, mode = "wb")
#### Lecture du GTFS ####
gtfs <- gtfs_read(source4GTFS)
#### Decoupage et visualisation du GTFS decoupe ####
gtfs2 <- gtfs_clip(gtfs,Zone)
# Creation de la carte basee sur les arrets
carte_arrets_entree = leaflet() %>%
# Ajout du fond de plan
addProviderTiles("CartoDB.Voyager") %>%
# Ajout des arrets
addCircleMarkers(lng= ~as.numeric(stop_lon), lat= ~as.numeric(stop_lat), data = gtfs2$stops,
stroke = FALSE, fillOpacity = 0.5, radius =1 )
# Affichage de la carte
carte_arrets_entree
#### Suppression des trips avec moins de deux arrets ####
trips <- gtfs2$trips
stop_times <- gtfs2$stop_times
# Jointure entre trips et stop_times
trips2 <- right_join(trips,stop_times) %>% select(route_id, service_id, trip_id, direction_id, stop_id)
# Compte du nombre d'arret par trips et selection des trips avec plus de 1 arrets
compte <- trips2 %>% group_by(trip_id) %>% summarise(nb = n())
compte <- compte %>% filter(nb>1)
# Jointures attributaires entre les deux tables et la table compte
trips2 <- right_join(trips,compte) %>% select(route_id, service_id, trip_id, direction_id)
stop_times2 <- right_join(stop_times,compte) %>% select(trip_id, arrival_time, departure_time, stop_id, stop_sequence, pickup_type, drop_off_type)
# Remplacement dans le GTFS par couches corrigees
gtfs2$trips <- trips2
gtfs2$stop_times <- stop_times2
#### Nettoyage et ecriture du GTFS ####
gtfs2 <- gtfs_clean(gtfs2)
gtfs2 <- gtfs_force_valid(gtfs2)
# Ecriture du nouveau GTFS
gtfs_write(gtfs2, sortie, name = "allemagne_regional_rail")
#### Suppression des fichiers crees ####
rm(carte_arrets_entree)
rm(compte)
rm(gtfs)
rm(gtfs2)
rm(stop_times)
rm(stop_times2)
rm(trips)
rm(trips2)
#---------BELGIQUE------------
#### Telechargement et definition du chemin des donnees ####
source5GTFS <- "C:/Users/boucherec/Documents/Stage/telechargements/belgique.zip"
download("http://opendata.tec-wl.be/Current%20GTFS/TEC-GTFS.zip",source5GTFS, mode = "wb")
#### Visualisation de l'emprise du GTFS d'entree ####
# Lecture du GTFS
gtfs <- gtfs_read(source5GTFS)
#### Decoupage et visualisation du GTFS decoupe ####
gtfs2 <- gtfs_clip(gtfs,Zone)
# Creation de la carte basee sur les arrets
carte_arrets_entree = leaflet() %>%
# Ajout du fond de plan
addProviderTiles("CartoDB.Voyager") %>%
# Ajout des arrets
addCircleMarkers(lng= ~as.numeric(stop_lon), lat= ~as.numeric(stop_lat), data = gtfs2$stops,
stroke = FALSE, fillOpacity = 0.5, radius =1 )
# Affichage de la carte
carte_arrets_entree
#### Suppression des trips avec moins de deux arrets ####
trips <- gtfs2$trips
stop_times <- gtfs2$stop_times
# Jointure entre trips et stop_times
trips2 <- right_join(trips,stop_times) %>% select(route_id, service_id, trip_id, trip_short_name, direction_id, stop_id)
# Compte du nombre d'arret par trips et selection des trips avec plus de 1 arrets
compte <- trips2 %>% group_by(trip_id) %>% summarise(nb = n())
compte <- compte %>% filter(nb>1)
# Jointures attributaires entre les deux tables et la table compte
trips2 <- right_join(trips,compte) %>% select(route_id, service_id, trip_id, trip_short_name, direction_id)
stop_times2 <- right_join(stop_times,compte) %>% select(trip_id, arrival_time, departure_time, stop_id, stop_sequence, pickup_type, drop_off_type)
# Remplacement dans le GTFS par couches corrigees
gtfs2$trips <- trips2
gtfs2$stop_times <- stop_times2
#### Nettoyage et ecriture du GTFS ####
gtfs2 <- gtfs_clean(gtfs2)
gtfs2 <- gtfs_force_valid(gtfs2)
# Ecriture du nouveau GTFS
gtfs_write(gtfs2, sortie, name ="belgique_gtfs")
#### Suppression des fichiers crees ####
rm(carte_arrets_entree)
rm(compte)
rm(gtfs)
rm(gtfs2)
rm(stop_times)
rm(stop_times2)
rm(stops)
rm(stops2)
rm(stops3)
rm(transfers)
rm(transfers2)
rm(trips)
rm(trips2)
# Script to create a line layer from a GTFS feed
library(UK2GTFS)
library("sf")
# Source du GTFS d'entree
source <- "C:/Users/boucherec/Documents/Stage/OTP/otp-data/transfrontalier/suisse_gtfs.zip"
# Lecture du GTFS
gtfs <- gtfs_read(source)
# Creation de la fonction de traitements
gtfs_trips_sf <- function(gtfs){
stop_times <- gtfs$stop_times
stops <- gtfs$stops
stops <- stops[,c("stop_id","stop_lon","stop_lat")]
if(class(stops$stop_lon) != "numeric"){
stops$stop_lon <- as.numeric(stops$stop_lon)
}
if(class(stops$stop_lat) != "numeric"){
stops$stop_lat <- as.numeric(stops$stop_lat)
}
if(anyNA(stops$stop_lon) | anyNA(stops$stop_lat)){
message("Stops with missing lat/lng removed")
stops <- stops[!is.na(stops$stop_lon),]
stops <- stops[!is.na(stops$stop_lat),]
}
df2line <- function(x){
geom <- as.matrix(x[,c("stop_lon","stop_lat")])
geom <- sf::st_linestring(geom)
res <- data.frame(trip_id = x$trip_id[1],
geometry = sf::st_sfc(list(geom)))
res <- sf::st_as_sf(res, crs = 4326)
return(res)
}
stop_times <- dplyr::left_join(stop_times, stops, by = "stop_id")
# Make sure stops follow stop_sequence within each trip before building the lines
stop_times <- dplyr::arrange(stop_times, trip_id, stop_sequence)
stop_times <- dplyr::group_by(stop_times, trip_id)
stop_times <- dplyr::group_split(stop_times)
stop_times <- lapply(stop_times, df2line)
stop_times <- dplyr::bind_rows(stop_times)
return(stop_times)
}
# Creation des lignes
lignes <- gtfs_trips_sf(gtfs)
# Ecriture du shapefile des lignes
st_write(lignes, "C:/Users/boucherec/Documents/Stage/Travail/Transfrontalier/Carto/lignes/lignes_suisse.shp")
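# Optional sketch: route attributes can be joined onto the lines before export
# (column names taken from the GTFS specification; route_short_name is assumed
# to be present in this feed):
# lignes2 <- dplyr::left_join(lignes, gtfs$trips[, c("trip_id", "route_id")], by = "trip_id")
# lignes2 <- dplyr::left_join(lignes2, gtfs$routes[, c("route_id", "route_short_name")], by = "route_id")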
# Script to attach routes (lignes) to the stops of a GTFS feed
library(UK2GTFS)
library("tidyverse")
source <- "C:/Users/boucherec/Documents/Stage/OTP/otp-data/transfrontalier/allemagne_local_transit.zip"
gtfs <- gtfs_read(source)
routes <- gtfs$routes
stop_times <- gtfs$stop_times
stops <- gtfs$stops
trips <- gtfs$trips
stops2 <- left_join(stop_times,stops) %>% select(trip_id, stop_id, stop_name, stop_lat, stop_lon)
stops3 <- left_join(trips,stops2) %>% select(route_id, trip_id, stop_id, stop_name, stop_lat, stop_lon)
stops4 <- left_join(routes,stops3) %>% select(route_long_name, route_id, trip_id, stop_id, stop_name, stop_lat, stop_lon)
stops5 <- stops4 %>% select(route_long_name, stop_id, stop_lat, stop_lon) %>% unique()
write.csv(stops5, "C:/Users/boucherec/Documents/Stage/Travail/Transfrontalier/Carto/arrets/arretslt.csv")
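# Optional sketch: the same table as a point layer in EPSG:4326 (e.g. to load
# directly in QGIS); the output path is hypothetical:
# arrets_sf <- stops5 %>%
#   filter(!is.na(stop_lon), !is.na(stop_lat)) %>%
#   mutate(stop_lon = as.numeric(stop_lon), stop_lat = as.numeric(stop_lat)) %>%
#   sf::st_as_sf(coords = c("stop_lon", "stop_lat"), crs = 4326)
# sf::st_write(arrets_sf, "arrets_lt.gpkg")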
"""
QGIS graphical model exported to Python
Name: Analyse des anomalies PDM - LDM
QGIS version: 3.1607
"""
from qgis.core import QgsProcessing
from qgis.core import QgsProcessingAlgorithm
from qgis.core import QgsProcessingMultiStepFeedback
from qgis.core import QgsProcessingParameterVectorLayer
from qgis.core import QgsProcessingParameterNumber
from qgis.core import QgsProcessingParameterFile
from qgis.core import QgsProcessingParameterFeatureSink
from qgis.core import QgsCoordinateReferenceSystem
import processing
class AnalyseExportProd(QgsProcessingAlgorithm):
def initAlgorithm(self, config=None):
self.addParameter(QgsProcessingParameterVectorLayer('Lieuxdemobilit', 'Lieux de mobilité', types=[QgsProcessing.TypeVectorPoint], defaultValue=None))
self.addParameter(QgsProcessingParameterVectorLayer('Pointsdemobilit', 'Points de mobilité', types=[QgsProcessing.TypeVectorPoint], defaultValue=None))
self.addParameter(QgsProcessingParameterNumber('RecherchedesLDMproches', 'Recherche des LDM proches (en mètres)', type=QgsProcessingParameterNumber.Integer, minValue=10, maxValue=500, defaultValue=50))
self.addParameter(QgsProcessingParameterNumber('RecherchedesPDMloignsdesLDMenmtres', 'Recherche des PDM éloignés des LDM (en mètres)', type=QgsProcessingParameterNumber.Integer, minValue=10, maxValue=500, defaultValue=150))
self.addParameter(QgsProcessingParameterFile('StyleZonesLDM', 'Style "Zones-LDM"', behavior=QgsProcessingParameterFile.File, fileFilter='Tous les fichiers (*.*)', defaultValue=None))
self.addParameter(QgsProcessingParameterFile('StyleZonesLDM (2)', 'Style "Anomalies"', behavior=QgsProcessingParameterFile.File, fileFilter='Tous les fichiers (*.*)', defaultValue='C:\\Users\\boucherec\\Documents\\Stage\\Travail\\PEM\\nancy\\styles\\Anomalies.qml'))
self.addParameter(QgsProcessingParameterFeatureSink('Zones_ldm', 'Zones_LDM', type=QgsProcessing.TypeVectorAnyGeometry, createByDefault=True, defaultValue=None))
self.addParameter(QgsProcessingParameterFeatureSink('Anomalies', 'Anomalies', type=QgsProcessing.TypeVectorAnyGeometry, createByDefault=True, defaultValue=None))
def processAlgorithm(self, parameters, context, model_feedback):
# Use a multi-step feedback, so that individual child algorithm progress reports are adjusted for the
# overall progress through the model
feedback = QgsProcessingMultiStepFeedback(46, model_feedback)
results = {}
outputs = {}
# PDM-2154
alg_params = {
'INPUT': parameters['Pointsdemobilit'],
'OPERATION': '',
'TARGET_CRS': QgsCoordinateReferenceSystem('EPSG:2154'),
'OUTPUT': QgsProcessing.TEMPORARY_OUTPUT
}
outputs['Pdm2154'] = processing.run('native:reprojectlayer', alg_params, context=context, feedback=feedback, is_child_algorithm=True)
feedback.setCurrentStep(1)
if feedback.isCanceled():
return {}
# Points vers lignes
alg_params = {
'CLOSE_PATH': True,
'DATE_FORMAT': '',
'GROUP_FIELD': 'LDMCodeRef',
'INPUT': outputs['Pdm2154']['OUTPUT'],
'ORDER_FIELD': 'PDMCode',
'OUTPUT': QgsProcessing.TEMPORARY_OUTPUT
}
outputs['PointsVersLignes'] = processing.run('qgis:pointstopath', alg_params, context=context, feedback=feedback, is_child_algorithm=True)
feedback.setCurrentStep(2)
if feedback.isCanceled():
return {}
# LDM-2154
alg_params = {
'INPUT': parameters['Lieuxdemobilit'],
'OPERATION': '',
'TARGET_CRS': QgsCoordinateReferenceSystem('EPSG:2154'),
'OUTPUT': QgsProcessing.TEMPORARY_OUTPUT
}
outputs['Ldm2154'] = processing.run('native:reprojectlayer', alg_params, context=context, feedback=feedback, is_child_algorithm=True)
feedback.setCurrentStep(3)
if feedback.isCanceled():
return {}
# Matrice des distances
alg_params = {
'INPUT': outputs['Ldm2154']['OUTPUT'],
'INPUT_FIELD': 'LDMCode',
'MATRIX_TYPE': 0,
'NEAREST_POINTS': 1,
'TARGET': outputs['Ldm2154']['OUTPUT'],
'TARGET_FIELD': 'LDMCode',
'OUTPUT': QgsProcessing.TEMPORARY_OUTPUT
}
outputs['MatriceDesDistances'] = processing.run('qgis:distancematrix', alg_params, context=context, feedback=feedback, is_child_algorithm=True)
feedback.setCurrentStep(4)
if feedback.isCanceled():
return {}
# Polygones de Voronoï
alg_params = {
'BUFFER': 5,
'INPUT': outputs['Ldm2154']['OUTPUT'],
'OUTPUT': QgsProcessing.TEMPORARY_OUTPUT
}
outputs['PolygonesDeVorono'] = processing.run('qgis:voronoipolygons', alg_params, context=context, feedback=feedback, is_child_algorithm=True)
feedback.setCurrentStep(5)
if feedback.isCanceled():
return {}
# Refactoriser les champs
alg_params = {
'FIELDS_MAPPING': [{'expression': '\"LDMCodeRef\"','length': 0,'name': 'LDMCodeRef','precision': 0,'type': 10},{'expression': '\"PDMCode\"','length': 0,'name': 'PDMCode','precision': 0,'type': 10},{'expression': '\"Name\"','length': 0,'name': 'Name','precision': 0,'type': 10},{'expression': '\"Type\"','length': 0,'name': 'Type','precision': 0,'type': 10},{'expression': '\"Latitude\"','length': 0,'name': 'Latitude','precision': 0,'type': 10},{'expression': '\"Longitude\"','length': 0,'name': 'Longitude','precision': 0,'type': 10},{'expression': '\"LocalityCode\"','length': 0,'name': 'LocalityCode','precision': 0,'type': 10},{'expression': '\"LocalityName\"','length': 0,'name': 'LocalityName','precision': 0,'type': 10},{'expression': '\"CountryCode\"','length': 0,'name': 'CountryCode','precision': 0,'type': 10},{'expression': '\"WheelchairAccess\"','length': 0,'name': 'WheelchairAccess','precision': 0,'type': 10}],
'INPUT': outputs['Pdm2154']['OUTPUT'],
'OUTPUT': QgsProcessing.TEMPORARY_OUTPUT
}
outputs['RefactoriserLesChamps'] = processing.run('native:refactorfields', alg_params, context=context, feedback=feedback, is_child_algorithm=True)
feedback.setCurrentStep(6)
if feedback.isCanceled():
return {}
# Rejoindre par des lignes (lignes de hub)
alg_params = {
'ANTIMERIDIAN_SPLIT': False,
'GEODESIC': False,
'GEODESIC_DISTANCE': 1000,
'HUBS': outputs['Ldm2154']['OUTPUT'],
'HUB_FIELD': 'LDMCode',
'HUB_FIELDS': ['LDMCode'],
'SPOKES': outputs['RefactoriserLesChamps']['OUTPUT'],
'SPOKE_FIELD': 'LDMCodeRef',
'SPOKE_FIELDS': ['PDMCode'],
'OUTPUT': QgsProcessing.TEMPORARY_OUTPUT
}
outputs['RejoindreParDesLignesLignesDeHub'] = processing.run('native:hublines', alg_params, context=context, feedback=feedback, is_child_algorithm=True)
feedback.setCurrentStep(7)
if feedback.isCanceled():
return {}
# Réparer les géométries
alg_params = {
'INPUT': outputs['PointsVersLignes']['OUTPUT'],
'OUTPUT': QgsProcessing.TEMPORARY_OUTPUT
}
outputs['RparerLesGomtries'] = processing.run('native:fixgeometries', alg_params, context=context, feedback=feedback, is_child_algorithm=True)
feedback.setCurrentStep(8)
if feedback.isCanceled():
return {}
# Refactoriser les champs
alg_params = {
'FIELDS_MAPPING': [{'expression': '\"LDMCode\"','length': 0,'name': 'LDMCode','precision': 0,'type': 10},{'expression': '\"PDMCode\"','length': 0,'name': 'PDMCode','precision': 0,'type': 10},{'expression': '$length','length': 0,'name': 'long','precision': 0,'type': 4}],
'INPUT': outputs['RejoindreParDesLignesLignesDeHub']['OUTPUT'],
'OUTPUT': QgsProcessing.TEMPORARY_OUTPUT
}
outputs['RefactoriserLesChamps'] = processing.run('native:refactorfields', alg_params, context=context, feedback=feedback, is_child_algorithm=True)
feedback.setCurrentStep(9)
if feedback.isCanceled():
return {}
# Joindre les attributs par localisation
alg_params = {
'DISCARD_NONMATCHING': False,
'INPUT': outputs['Pdm2154']['OUTPUT'],
'JOIN': outputs['PolygonesDeVorono']['OUTPUT'],
'JOIN_FIELDS': ['LDMCode'],
'METHOD': 0,
'PREDICATE': [0],
'PREFIX': '',
'OUTPUT': QgsProcessing.TEMPORARY_OUTPUT
}
outputs['JoindreLesAttributsParLocalisation'] = processing.run('native:joinattributesbylocation', alg_params, context=context, feedback=feedback, is_child_algorithm=True)
feedback.setCurrentStep(10)
if feedback.isCanceled():
return {}
# Refactoriser les champs
alg_params = {
'FIELDS_MAPPING': [{'expression': '\"LDMCode\"','length': 0,'name': 'LDMCode','precision': 0,'type': 10},{'expression': '\"Name\"','length': 0,'name': 'Name','precision': 0,'type': 10},{'expression': '\"Type\"','length': 0,'name': 'Type','precision': 0,'type': 10},{'expression': '\"Latitude\"','length': 0,'name': 'Latitude','precision': 0,'type': 10},{'expression': '\"Longitude\"','length': 0,'name': 'Longitude','precision': 0,'type': 10},{'expression': '\"LocalityCode\"','length': 0,'name': 'LocalityCode','precision': 0,'type': 10},{'expression': '\"LocalityName\"','length': 0,'name': 'LocalityName','precision': 0,'type': 10},{'expression': '\"CountryCode\"','length': 0,'name': 'CountryCode','precision': 0,'type': 10},{'expression': '$x','length': 20,'name': 'long','precision': 0,'type': 6},{'expression': '$y','length': 20,'name': 'lat','precision': 0,'type': 6}],
'INPUT': outputs['Ldm2154']['OUTPUT'],
'OUTPUT': QgsProcessing.TEMPORARY_OUTPUT
}
outputs['RefactoriserLesChamps'] = processing.run('native:refactorfields', alg_params, context=context, feedback=feedback, is_child_algorithm=True)
feedback.setCurrentStep(11)
if feedback.isCanceled():
return {}
# Extraire par attribut
alg_params = {
'FIELD': 'long',
'INPUT': outputs['RefactoriserLesChamps']['OUTPUT'],
'OPERATOR': 2,
'VALUE': parameters['RecherchedesPDMloignsdesLDMenmtres'],
'OUTPUT': QgsProcessing.TEMPORARY_OUTPUT
}
outputs['ExtraireParAttribut'] = processing.run('native:extractbyattribute', alg_params, context=context, feedback=feedback, is_child_algorithm=True)
feedback.setCurrentStep(12)
if feedback.isCanceled():
return {}
# Extraire par attribut
alg_params = {
'FIELD': 'long',
'INPUT': outputs['RefactoriserLesChamps']['OUTPUT'],
'OPERATOR': 0,
'VALUE': '0',
'OUTPUT': QgsProcessing.TEMPORARY_OUTPUT
}
outputs['ExtraireParAttribut'] = processing.run('native:extractbyattribute', alg_params, context=context, feedback=feedback, is_child_algorithm=True)
feedback.setCurrentStep(13)
if feedback.isCanceled():
return {}
# Extraire par attribut
alg_params = {
'FIELD': 'Distance',
'INPUT': outputs['MatriceDesDistances']['OUTPUT'],
'OPERATOR': 4,
'VALUE': parameters['RecherchedesLDMproches'],
'OUTPUT': QgsProcessing.TEMPORARY_OUTPUT
}
outputs['ExtraireParAttribut'] = processing.run('native:extractbyattribute', alg_params, context=context, feedback=feedback, is_child_algorithm=True)
feedback.setCurrentStep(14)
if feedback.isCanceled():
return {}
# Tampon 10m
alg_params = {
'DISSOLVE': False,
'DISTANCE': 10,
'END_CAP_STYLE': 0,
'INPUT': outputs['RparerLesGomtries']['OUTPUT'],
'JOIN_STYLE': 0,
'MITER_LIMIT': 2,
'SEGMENTS': 5,
'OUTPUT': QgsProcessing.TEMPORARY_OUTPUT
}
outputs['Tampon10m'] = processing.run('native:buffer', alg_params, context=context, feedback=feedback, is_child_algorithm=True)
feedback.setCurrentStep(15)
if feedback.isCanceled():
return {}
# Extraire par localisation
alg_params = {
'INPUT': outputs['Pdm2154']['OUTPUT'],
'INTERSECT': outputs['ExtraireParAttribut']['OUTPUT'],
'PREDICATE': [0],
'OUTPUT': QgsProcessing.TEMPORARY_OUTPUT
}
outputs['ExtraireParLocalisation'] = processing.run('native:extractbylocation', alg_params, context=context, feedback=feedback, is_child_algorithm=True)
feedback.setCurrentStep(16)
if feedback.isCanceled():
return {}
# Refactoriser les champs
alg_params = {
'FIELDS_MAPPING': [{'expression': '\"LDMCodeRef\"','length': 0,'name': 'LDMCodeRef','precision': 0,'type': 10},{'expression': '\"PDMCode\"','length': 0,'name': 'PDMCode','precision': 0,'type': 10},{'expression': '\"Name\"','length': 0,'name': 'Name','precision': 0,'type': 10},{'expression': '\"Type\"','length': 0,'name': 'Type','precision': 0,'type': 10},{'expression': '\"Latitude\"','length': 0,'name': 'Latitude','precision': 0,'type': 10},{'expression': '\"Longitude\"','length': 0,'name': 'Longitude','precision': 0,'type': 10},{'expression': '\"LocalityCode\"','length': 0,'name': 'LocalityCode','precision': 0,'type': 10},{'expression': '\"LocalityName\"','length': 0,'name': 'LocalityName','precision': 0,'type': 10},{'expression': '\"CountryCode\"','length': 0,'name': 'CountryCode','precision': 0,'type': 10},{'expression': '\"WheelchairAccess\"','length': 0,'name': 'WheelchairAccess','precision': 0,'type': 10},{'expression': '\"LDMCode\"','length': 0,'name': 'LDMCode','precision': 0,'type': 10},{'expression': 'LDMCodeRef=LDMCode','length': 0,'name': 'egal','precision': 0,'type': 0}],
'INPUT': outputs['JoindreLesAttributsParLocalisation']['OUTPUT'],
'OUTPUT': QgsProcessing.TEMPORARY_OUTPUT
}
outputs['RefactoriserLesChamps'] = processing.run('native:refactorfields', alg_params, context=context, feedback=feedback, is_child_algorithm=True)
feedback.setCurrentStep(17)
if feedback.isCanceled():
return {}
# Supprimer les doublons par attribut
alg_params = {
'FIELDS': ['long','lat'],
'INPUT': outputs['RefactoriserLesChamps']['OUTPUT'],
'DUPLICATES': QgsProcessing.TEMPORARY_OUTPUT,
'OUTPUT': QgsProcessing.TEMPORARY_OUTPUT
}
outputs['SupprimerLesDoublonsParAttribut'] = processing.run('native:removeduplicatesbyattribute', alg_params, context=context, feedback=feedback, is_child_algorithm=True)
feedback.setCurrentStep(18)
if feedback.isCanceled():
return {}
# Refactoriser les champs 2
alg_params = {
'FIELDS_MAPPING': [{'expression': '\"LDMCodeRef\"','length': 0,'name': 'LDMCodeRef','precision': 0,'type': 10},{'expression': '\"PDMCode\"','length': 0,'name': 'PDMCode','precision': 0,'type': 10},{'expression': '\"Name\"','length': 0,'name': 'Name','precision': 0,'type': 10},{'expression': '\"Type\"','length': 0,'name': 'Type','precision': 0,'type': 10},{'expression': '\"Latitude\"','length': 0,'name': 'Latitude','precision': 0,'type': 10},{'expression': '\"Longitude\"','length': 0,'name': 'Longitude','precision': 0,'type': 10},{'expression': '\"LocalityCode\"','length': 0,'name': 'LocalityCode','precision': 0,'type': 10},{'expression': '\"LocalityName\"','length': 0,'name': 'LocalityName','precision': 0,'type': 10},{'expression': '\"CountryCode\"','length': 0,'name': 'CountryCode','precision': 0,'type': 10},{'expression': '\"WheelchairAccess\"','length': 0,'name': 'WheelchairAccess','precision': 0,'type': 10},{'expression': '\'PDM loin de son LDM\'','length': 0,'name': 'type','precision': 0,'type': 10}],
'INPUT': outputs['ExtraireParLocalisation']['OUTPUT'],
'OUTPUT': QgsProcessing.TEMPORARY_OUTPUT
}
outputs['RefactoriserLesChamps2'] = processing.run('native:refactorfields', alg_params, context=context, feedback=feedback, is_child_algorithm=True)
feedback.setCurrentStep(19)
if feedback.isCanceled():
return {}
# Refactoriser les champs 5
alg_params = {
'FIELDS_MAPPING': [{'expression': '\"LDMCode\"','length': 0,'name': 'LDMCode','precision': 0,'type': 10},{'expression': '\"Name\"','length': 0,'name': 'Name','precision': 0,'type': 10},{'expression': '\"Type\"','length': 0,'name': 'Type','precision': 0,'type': 10},{'expression': '\"Latitude\"','length': 0,'name': 'Latitude','precision': 0,'type': 10},{'expression': '\"Longitude\"','length': 0,'name': 'Longitude','precision': 0,'type': 10},{'expression': '\"LocalityCode\"','length': 0,'name': 'LocalityCode','precision': 0,'type': 10},{'expression': '\"LocalityName\"','length': 0,'name': 'LocalityName','precision': 0,'type': 10},{'expression': '\"CountryCode\"','length': 0,'name': 'CountryCode','precision': 0,'type': 10},{'expression': '\"long\"','length': -1,'name': 'long','precision': 0,'type': 6},{'expression': '\"lat\"','length': -1,'name': 'lat','precision': 0,'type': 6},{'expression': '\'Superposition de deux LDM\'','length': 0,'name': 'type','precision': 0,'type': 10}],
'INPUT': outputs['SupprimerLesDoublonsParAttribut']['DUPLICATES'],
'OUTPUT': QgsProcessing.TEMPORARY_OUTPUT
}
outputs['RefactoriserLesChamps5'] = processing.run('native:refactorfields', alg_params, context=context, feedback=feedback, is_child_algorithm=True)
feedback.setCurrentStep(20)
if feedback.isCanceled():
return {}
# Extraire par attribut
# Cette sélection attributaire permet de supprimer les doublons avec les LDM qui se superposent.
alg_params = {
'FIELD': 'Distance',
'INPUT': outputs['ExtraireParAttribut']['OUTPUT'],
'OPERATOR': 2,
'VALUE': '0',
'OUTPUT': QgsProcessing.TEMPORARY_OUTPUT
}
outputs['ExtraireParAttribut'] = processing.run('native:extractbyattribute', alg_params, context=context, feedback=feedback, is_child_algorithm=True)
feedback.setCurrentStep(21)
if feedback.isCanceled():
return {}
# Extraire par attribut
alg_params = {
'FIELD': 'egal',
'INPUT': outputs['RefactoriserLesChamps']['OUTPUT'],
'OPERATOR': 0,
'VALUE': '0',
'OUTPUT': QgsProcessing.TEMPORARY_OUTPUT
}
outputs['ExtraireParAttribut'] = processing.run('native:extractbyattribute', alg_params, context=context, feedback=feedback, is_child_algorithm=True)
feedback.setCurrentStep(22)
if feedback.isCanceled():
return {}
# Tampon LDM seuls
alg_params = {
'DISSOLVE': False,
'DISTANCE': 10,
'END_CAP_STYLE': 0,
'INPUT': outputs['ExtraireParAttribut']['OUTPUT'],
'JOIN_STYLE': 0,
'MITER_LIMIT': 2,
'SEGMENTS': 5,
'OUTPUT': QgsProcessing.TEMPORARY_OUTPUT
}
outputs['TamponLdmSeuls'] = processing.run('native:buffer', alg_params, context=context, feedback=feedback, is_child_algorithm=True)
feedback.setCurrentStep(23)
if feedback.isCanceled():
return {}
# Joindre les attributs par valeur de champ
alg_params = {
'DISCARD_NONMATCHING': False,
'FIELD': 'LDMCodeRef',
'FIELDS_TO_COPY': ['LDMCode'],
'FIELD_2': 'LDMCode',
'INPUT': outputs['ExtraireParAttribut']['OUTPUT'],
'INPUT_2': outputs['RefactoriserLesChamps5']['OUTPUT'],
'METHOD': 1,
'PREFIX': '',
'OUTPUT': QgsProcessing.TEMPORARY_OUTPUT
}
outputs['JoindreLesAttributsParValeurDeChamp'] = processing.run('native:joinattributestable', alg_params, context=context, feedback=feedback, is_child_algorithm=True)
feedback.setCurrentStep(24)
if feedback.isCanceled():
return {}
# Enveloppe convexe
alg_params = {
'INPUT': outputs['Tampon10m']['OUTPUT'],
'OUTPUT': QgsProcessing.TEMPORARY_OUTPUT
}
outputs['EnveloppeConvexe'] = processing.run('native:convexhull', alg_params, context=context, feedback=feedback, is_child_algorithm=True)
feedback.setCurrentStep(25)
if feedback.isCanceled():
return {}
# Fusionner des couches vecteur
alg_params = {
'CRS': QgsCoordinateReferenceSystem('EPSG:2154'),
'LAYERS': [outputs['TamponLdmSeuls']['OUTPUT'],outputs['EnveloppeConvexe']['OUTPUT']],
'OUTPUT': QgsProcessing.TEMPORARY_OUTPUT
}
outputs['FusionnerDesCouchesVecteur'] = processing.run('native:mergevectorlayers', alg_params, context=context, feedback=feedback, is_child_algorithm=True)
feedback.setCurrentStep(26)
if feedback.isCanceled():
return {}
# Extraire par attribut
# Cette étape et la précédente permettent de supprimer les "PDM pas associé au LDM le plus proche" si ils sont contenus dans des LDM superposées
alg_params = {
'FIELD': 'LDMCode_2',
'INPUT': outputs['JoindreLesAttributsParValeurDeChamp']['OUTPUT'],
'OPERATOR': 8,
'VALUE': '',
'OUTPUT': QgsProcessing.TEMPORARY_OUTPUT
}
outputs['ExtraireParAttribut'] = processing.run('native:extractbyattribute', alg_params, context=context, feedback=feedback, is_child_algorithm=True)
feedback.setCurrentStep(27)
if feedback.isCanceled():
return {}
# Tampon
alg_params = {
'DISSOLVE': False,
'DISTANCE': 25,
'END_CAP_STYLE': 0,
'INPUT': outputs['ExtraireParAttribut']['OUTPUT'],
'JOIN_STYLE': 0,
'MITER_LIMIT': 2,
'SEGMENTS': 5,
'OUTPUT': QgsProcessing.TEMPORARY_OUTPUT
}
outputs['Tampon'] = processing.run('native:buffer', alg_params, context=context, feedback=feedback, is_child_algorithm=True)
feedback.setCurrentStep(28)
if feedback.isCanceled():
return {}
# Refactoriser les champs 4
alg_params = {
'FIELDS_MAPPING': [{'expression': '\"LDMCodeRef\"','length': 0,'name': 'LDMCodeRef','precision': 0,'type': 10},{'expression': '\"PDMCode\"','length': 0,'name': 'PDMCode','precision': 0,'type': 10},{'expression': '\"Name\"','length': 0,'name': 'Name','precision': 0,'type': 10},{'expression': '\"Type\"','length': 0,'name': 'Type','precision': 0,'type': 10},{'expression': '\"Latitude\"','length': 0,'name': 'Latitude','precision': 0,'type': 10},{'expression': '\"Longitude\"','length': 0,'name': 'Longitude','precision': 0,'type': 10},{'expression': '\"LocalityCode\"','length': 0,'name': 'LocalityCode','precision': 0,'type': 10},{'expression': '\"LocalityName\"','length': 0,'name': 'LocalityName','precision': 0,'type': 10},{'expression': '\"CountryCode\"','length': 0,'name': 'CountryCode','precision': 0,'type': 10},{'expression': '\"WheelchairAccess\"','length': 0,'name': 'WheelchairAccess','precision': 0,'type': 10},{'expression': '\"egal\"','length': 0,'name': 'egal','precision': 0,'type': 10},{'expression': '\'PDM pas associé au LDM le plus proche\'','length': 50,'name': 'type','precision': 0,'type': 10}],
'INPUT': outputs['ExtraireParAttribut']['OUTPUT'],
'OUTPUT': QgsProcessing.TEMPORARY_OUTPUT
}
outputs['RefactoriserLesChamps4'] = processing.run('native:refactorfields', alg_params, context=context, feedback=feedback, is_child_algorithm=True)
feedback.setCurrentStep(29)
if feedback.isCanceled():
return {}
# Refactoriser les champs
alg_params = {
'FIELDS_MAPPING': [{'expression': 'concat(\"LDMCode\",\"LDMCodeRef\")','length': 0,'name': 'LDMCode','precision': 0,'type': 10}],
'INPUT': outputs['FusionnerDesCouchesVecteur']['OUTPUT'],
'OUTPUT': QgsProcessing.TEMPORARY_OUTPUT
}
outputs['RefactoriserLesChamps'] = processing.run('native:refactorfields', alg_params, context=context, feedback=feedback, is_child_algorithm=True)
feedback.setCurrentStep(30)
if feedback.isCanceled():
return {}
# Supprimer les doublons par attribut
alg_params = {
'FIELDS': ['LDMCode'],
'INPUT': outputs['RefactoriserLesChamps']['OUTPUT'],
'OUTPUT': parameters['Zones_ldm']
}
outputs['SupprimerLesDoublonsParAttribut'] = processing.run('native:removeduplicatesbyattribute', alg_params, context=context, feedback=feedback, is_child_algorithm=True)
results['Zones_ldm'] = outputs['SupprimerLesDoublonsParAttribut']['OUTPUT']
feedback.setCurrentStep(31)
if feedback.isCanceled():
return {}
# Centroïdes
alg_params = {
'ALL_PARTS': False,
'INPUT': outputs['Tampon']['OUTPUT'],
'OUTPUT': QgsProcessing.TEMPORARY_OUTPUT
}
outputs['Centrodes'] = processing.run('native:centroids', alg_params, context=context, feedback=feedback, is_child_algorithm=True)
feedback.setCurrentStep(32)
if feedback.isCanceled():
return {}
# Polygones vers lignes
alg_params = {
'INPUT': outputs['SupprimerLesDoublonsParAttribut']['OUTPUT'],
'OUTPUT': QgsProcessing.TEMPORARY_OUTPUT
}
outputs['PolygonesVersLignes'] = processing.run('native:polygonstolines', alg_params, context=context, feedback=feedback, is_child_algorithm=True)
feedback.setCurrentStep(33)
if feedback.isCanceled():
return {}
# Configurer un style pour une couche vecteur
alg_params = {
'INPUT': outputs['SupprimerLesDoublonsParAttribut']['OUTPUT'],
'STYLE': parameters['StyleZonesLDM']
}
outputs['ConfigurerUnStylePourUneCoucheVecteur'] = processing.run('qgis:setstyleforvectorlayer', alg_params, context=context, feedback=feedback, is_child_algorithm=True)
feedback.setCurrentStep(34)
if feedback.isCanceled():
return {}
# Intersections de lignes
alg_params = {
'INPUT': outputs['PolygonesVersLignes']['OUTPUT'],
'INPUT_FIELDS': [''],
'INTERSECT': outputs['PolygonesVersLignes']['OUTPUT'],
'INTERSECT_FIELDS': [''],
'INTERSECT_FIELDS_PREFIX': '',
'OUTPUT': QgsProcessing.TEMPORARY_OUTPUT
}
outputs['IntersectionsDeLignes'] = processing.run('native:lineintersections', alg_params, context=context, feedback=feedback, is_child_algorithm=True)
feedback.setCurrentStep(35)
if feedback.isCanceled():
return {}
# Points vers lignes
alg_params = {
'CLOSE_PATH': False,
'DATE_FORMAT': '',
'GROUP_FIELD': 'LDMCode',
'INPUT': outputs['IntersectionsDeLignes']['OUTPUT'],
'ORDER_FIELD': 'LDMCode',
'OUTPUT': QgsProcessing.TEMPORARY_OUTPUT
}
outputs['PointsVersLignes'] = processing.run('qgis:pointstopath', alg_params, context=context, feedback=feedback, is_child_algorithm=True)
feedback.setCurrentStep(36)
if feedback.isCanceled():
return {}
# Tampon avant centroïdes
alg_params = {
'DISSOLVE': False,
'DISTANCE': 30,
'END_CAP_STYLE': 0,
'INPUT': outputs['PointsVersLignes']['OUTPUT'],
'JOIN_STYLE': 0,
'MITER_LIMIT': 2,
'SEGMENTS': 5,
'OUTPUT': QgsProcessing.TEMPORARY_OUTPUT
}
outputs['TamponAvantCentrodes'] = processing.run('native:buffer', alg_params, context=context, feedback=feedback, is_child_algorithm=True)
feedback.setCurrentStep(37)
if feedback.isCanceled():
return {}
# Centroïdes
alg_params = {
'ALL_PARTS': False,
'INPUT': outputs['TamponAvantCentrodes']['OUTPUT'],
'OUTPUT': QgsProcessing.TEMPORARY_OUTPUT
}
outputs['Centrodes'] = processing.run('native:centroids', alg_params, context=context, feedback=feedback, is_child_algorithm=True)
feedback.setCurrentStep(38)
if feedback.isCanceled():
return {}
# Extraire par localisation
# Cette sélection par localisation évite la redondance entre les LDM qui se superposent et les LDM proches.
# A noter : 2 LDM peuvent être proches mais pas se superposer et 2 LDM peuvent être superposés mais pas proches (centroïde pris en compte et non pas périmètre)
alg_params = {
'INPUT': outputs['Centrodes']['OUTPUT'],
'INTERSECT': outputs['TamponAvantCentrodes']['OUTPUT'],
'PREDICATE': [2],
'OUTPUT': QgsProcessing.TEMPORARY_OUTPUT
}
outputs['ExtrairePasLocalisation'] = processing.run('native:extractbylocation', alg_params, context=context, feedback=feedback, is_child_algorithm=True)
feedback.setCurrentStep(39)
if feedback.isCanceled():
return {}
# Refactoriser les champs 1
alg_params = {
'FIELDS_MAPPING': [{'expression': '\"InputID\"','length': 0,'name': 'InputID','precision': 0,'type': 10},{'expression': '\"TargetID\"','length': 0,'name': 'TargetID','precision': 0,'type': 10},{'expression': '\"Distance\"','length': 0,'name': 'Distance','precision': 0,'type': 6},{'expression': '\'LDM proches\'','length': 0,'name': 'type','precision': 0,'type': 10}],
'INPUT': outputs['ExtrairePasLocalisation']['OUTPUT'],
'OUTPUT': QgsProcessing.TEMPORARY_OUTPUT
}
outputs['RefactoriserLesChamps1'] = processing.run('native:refactorfields', alg_params, context=context, feedback=feedback, is_child_algorithm=True)
feedback.setCurrentStep(40)
if feedback.isCanceled():
return {}
# Refactoriser les champs 3
alg_params = {
'FIELDS_MAPPING': [{'expression': '\"LDMCode\"','length': 0,'name': 'LDMCode','precision': 0,'type': 10},{'expression': '\"begin\"','length': 0,'name': 'begin','precision': 0,'type': 10},{'expression': '\"end\"','length': 0,'name': 'end','precision': 0,'type': 10},{'expression': '\'Intersection de deux LDM\'','length': 0,'name': 'type','precision': 0,'type': 10}],
'INPUT': outputs['Centrodes']['OUTPUT'],
'OUTPUT': QgsProcessing.TEMPORARY_OUTPUT
}
outputs['RefactoriserLesChamps3'] = processing.run('native:refactorfields', alg_params, context=context, feedback=feedback, is_child_algorithm=True)
feedback.setCurrentStep(41)
if feedback.isCanceled():
return {}
# Fusionner des couches vecteur
alg_params = {
'CRS': QgsCoordinateReferenceSystem('EPSG:2154'),
'LAYERS': [outputs['RefactoriserLesChamps1']['OUTPUT'],outputs['RefactoriserLesChamps3']['OUTPUT'],outputs['RefactoriserLesChamps2']['OUTPUT'],outputs['RefactoriserLesChamps4']['OUTPUT'],outputs['RefactoriserLesChamps5']['OUTPUT']],
'OUTPUT': QgsProcessing.TEMPORARY_OUTPUT
}
outputs['FusionnerDesCouchesVecteur'] = processing.run('native:mergevectorlayers', alg_params, context=context, feedback=feedback, is_child_algorithm=True)
feedback.setCurrentStep(42)
if feedback.isCanceled():
return {}
# Joindre les attributs par localisation
alg_params = {
'DISCARD_NONMATCHING': False,
'INPUT': outputs['FusionnerDesCouchesVecteur']['OUTPUT'],
'JOIN': outputs['Pdm2154']['OUTPUT'],
'JOIN_FIELDS': [''],
'METHOD': 0,
'PREDICATE': [0],
'PREFIX': '',
'OUTPUT': QgsProcessing.TEMPORARY_OUTPUT
}
outputs['JoindreLesAttributsParLocalisation'] = processing.run('native:joinattributesbylocation', alg_params, context=context, feedback=feedback, is_child_algorithm=True)
feedback.setCurrentStep(43)
if feedback.isCanceled():
return {}
# Refactoriser les champs
alg_params = {
'FIELDS_MAPPING': [{'expression': '\"type\"','length': 0,'name': 'type','precision': 0,'type': 10},{'expression': '\"InputID\"','length': 0,'name': 'LDM-proche1','precision': 0,'type': 10},{'expression': '\"TargetID\"','length': 0,'name': 'LDM-proche2','precision': 0,'type': 10},{'expression': '\"LDMCode\"','length': 0,'name': 'LDMCode','precision': 0,'type': 10},{'expression': '\"LDMCodeRef\"','length': 0,'name': 'LDMCodeRef','precision': 0,'type': 10},{'expression': '\"PDMCode\"','length': 0,'name': 'PDMCode','precision': 0,'type': 10}],
'INPUT': outputs['JoindreLesAttributsParLocalisation']['OUTPUT'],
'OUTPUT': QgsProcessing.TEMPORARY_OUTPUT
}
outputs['RefactoriserLesChamps'] = processing.run('native:refactorfields', alg_params, context=context, feedback=feedback, is_child_algorithm=True)
feedback.setCurrentStep(44)
if feedback.isCanceled():
return {}
# Supprimer les doublons par attribut
alg_params = {
'FIELDS': ['type','LDM-proche1','LDM-proche2','LDMCode','LDMCodeRef','PDMCode'],
'INPUT': outputs['RefactoriserLesChamps']['OUTPUT'],
'OUTPUT': parameters['Anomalies']
}
outputs['SupprimerLesDoublonsParAttribut'] = processing.run('native:removeduplicatesbyattribute', alg_params, context=context, feedback=feedback, is_child_algorithm=True)
results['Anomalies'] = outputs['SupprimerLesDoublonsParAttribut']['OUTPUT']
feedback.setCurrentStep(45)
if feedback.isCanceled():
return {}
# Configurer un style pour une couche vecteur
alg_params = {
'INPUT': outputs['SupprimerLesDoublonsParAttribut']['OUTPUT'],
'STYLE': parameters['StyleZonesLDM (2)']
}
outputs['ConfigurerUnStylePourUneCoucheVecteur'] = processing.run('qgis:setstyleforvectorlayer', alg_params, context=context, feedback=feedback, is_child_algorithm=True)
return results
def name(self):
return 'Analyse export Prod'
def displayName(self):
return 'Analyse export Prod'
def group(self):
return ''
def groupId(self):
return ''
def createInstance(self):
return AnalyseExportProd()
# Example script for launching an OpenTripPlanner instance
# Set the working directory containing the data:
cd C:\Users\boucherec\Documents\Stage\OTP
# Build the graph:
java -jar -Xmx1100M otp-1.5.0-shaded.jar --build .\otp-data\transfrontalier
# Start the OTP server:
java -jar -Xmx1100M otp-1.5.0-shaded.jar --server --analyst --router transfrontalier --graphs otp-data --basePath .\
# View the result at http://localhost:8080/
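# Once the server is up, it can also be queried from R with the
# `opentripplanner` package referenced at the top of the first script
# (sketch only; function names come from that package and the coordinates
# and exact arguments are purely indicative):
# library(opentripplanner)
# otpcon <- otp_connect(hostname = "localhost", port = 8080, router = "transfrontalier")
# itineraire <- otp_plan(otpcon,
#                        fromPlace = c(6.18, 49.12),  # lon/lat, illustrative
#                        toPlace = c(7.75, 48.58))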