diff --git a/.github/workflows/build.yaml b/.github/workflows/build.yaml
index e322411a..11507255 100644
--- a/.github/workflows/build.yaml
+++ b/.github/workflows/build.yaml
@@ -94,4 +94,4 @@ jobs:
path: staging
env:
- MAVEN_OPTS: -Xmx10G
\ No newline at end of file
+ MAVEN_OPTS: -Xmx512m
\ No newline at end of file
diff --git a/.github/workflows/publish.yaml b/.github/workflows/publish.yaml
index 4ef94b1d..55ba46d1 100644
--- a/.github/workflows/publish.yaml
+++ b/.github/workflows/publish.yaml
@@ -17,4 +17,4 @@ jobs:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
env:
- MAVEN_OPTS: -Xmx10G
\ No newline at end of file
+ MAVEN_OPTS: -Xmx512m
\ No newline at end of file
diff --git a/CITATION.cff b/CITATION.cff
new file mode 100644
index 00000000..e24476fd
--- /dev/null
+++ b/CITATION.cff
@@ -0,0 +1,38 @@
+# This CITATION.cff file was generated with cffinit.
+# Visit https://bit.ly/cffinit to generate yours today!
+
+cff-version: 1.2.0
+title: The MATSim Open Kelheim Scenario
+message: >-
+ If you use this software, please cite it using the
+ metadata from this file.
+type: software
+authors:
+ - given-names: Tilmann
+ family-names: Schlenther
+ email: schlenther@vsp.tu-berlin.de
+ affiliation: Technische Universität Berlin
+ orcid: 'https://orcid.org/0000-0001-6781-6918'
+ - given-names: Chengqi
+ family-names: Lu
+ email: lu@vsp.tu-berlin.de
+ affiliation: Technische Universität Berlin
+ - given-names: Christian
+ family-names: Rakow
+ email: rakow@vsp.tu-berlin.de
+ affiliation: Technische Universität Berlin
+ - given-names: Simon
+ family-names: Meinhardt
+ email: meinhardt@vsp.tu-berlin.de
+ affiliation: Technische Universität Berlin
+ - given-names: Kai
+ family-names: Nagel
+ email: nagel@vsp.tu-berlin.de
+ affiliation: Technische Universität Berlin
+ orcid: 'https://orcid.org/0000-0003-2775-6898'
+url: "https://github.com/matsim-scenarios/matsim-kelheim"
+doi: 10.5281/zenodo.8322240
+date-released: 2023-09-06
+year: 2023
+version: 3.0
+license: AGPL-3.0
diff --git a/README.md b/README.md
index bc944805..5c1eff26 100644
--- a/README.md
+++ b/README.md
@@ -2,14 +2,19 @@
[![Build Status](https://github.com/matsim-scenarios/matsim-kelheim/actions/workflows/build.yaml/badge.svg)](https://github.com/matsim-scenarios/matsim-kelheim/actions/workflows/build.yaml)
![license](https://img.shields.io/github/license/matsim-scenarios/matsim-kelheim.svg)
+[![DOI (v3.0)](https://zenodo.org/badge/360167859.svg)](https://zenodo.org/badge/latestdoi/360167859)
![JDK](https://img.shields.io/badge/JDK-17+-green.svg)
![Kelheim MATSim network and agents](visualization-kelheim.png "Kelheim MATSim network and agents")
+
+
+
+
### About this project
-This repository provides an open MATSim transport model for Kelheim, provided by the [Transport Systems Planning and Transport Telematics group](https://www.vsp.tu-berlin.de) of [Technische Universität Berlin](http://www.tu-berlin.de).
+This repository provides an open MATSim transport model for Kelheim, developed by the [Transport Systems Planning and Transport Telematics group](https://www.tu.berlin/vsp) of [Technische Universität Berlin](http://www.tu-berlin.de).
@@ -24,9 +29,11 @@ The **MATSim input files, output files, analysis data and visualizations** are l
**Other data files**, in particular in `original-input-data`, have their own individual licenses that need to be individually clarified with the copyright holders.
-### Note
+The input plans (person transport demand) for this project were generated based on data provided by [Senozon AG](https://senozon.com/).
+
+### Note (where to find input and output)
-Handling of large files within git is not without problems (git lfs files are not included in the zip download; we have to pay; ...). In consequence, large files, both on the input and on the output side, reside at https://svn.vsp.tu-berlin.de/repos/public-svn/matsim/scenarios/countries/de/kelheim .
+Handling of large files within git is not without problems (git lfs files are not included in the zip download; we have to pay; ...). As a consequence, large files, both on the input and on the output side, reside in [the public matsim-kelheim data repo](https://svn.vsp.tu-berlin.de/repos/public-svn/matsim/scenarios/countries/de/kelheim).
----
### Run the MATSim Kelheim scenario
@@ -42,7 +49,8 @@ It can be used by using either of these methods:
1. Set up the project in your IDE.
1. Make sure the project is configured as maven project.
-1. Run the JAVA class `src/main/java/org/matsim/run/RunKelheimScenario.java` with the following program argument `run`. Add '--1pct' for test runs with a smaller sample size.
+1. Run the Java class `src/main/java/org/matsim/run/RunKelheimScenario.java` with the program argument `run`.
+   1. Add `--1pct` for test runs with a smaller sample size. Be aware that the model is calibrated for the 25 pct sample, so outputs for 1 pct might be somewhat off.
1. "Open" the output directory. You can drag files into VIA as was already done above.
1. Edit the config file or adjust the run class. Re-run MATSim.
@@ -51,8 +59,26 @@ It can be used by using either of these methods:
1. Open the cmd and go to your project directory
2. Build the scenario using `mvnw package`. Add the option `-DskipTests=true` in order to skip tests and speed up the process.
-3. There should be a file directly in the `matsim-kelheim` directory with name approximately as `matsim-kelheim-2.x.jar`.
-4. Run this file from the command line using `java -jar matsim-kelheim-2.x.jar --help` to see all possible options.
+3. There should now be a file directly in the `matsim-kelheim` directory with a name like `matsim-kelheim-3.x-SNAPSHOT-.jar`.
+4. Run this file from the command line using `java -jar matsim-kelheim-3.x-SNAPSHOT-.jar --help` to see all possible options.
1. For example, one can disable lanes or run smaller sample sizes using the available options
-5. Start this scenario using the default config by running `java -jar matsim-kelheim-2.x.jar`.
-6. "Open" the output directory. You can drag files into VIA as was already done above.
+5. Start this scenario using the default config by running `java -jar matsim-kelheim-3.x-SNAPSHOT-.jar`.
+   1. If you want to run the scenario somewhere else, e.g. on a computation cluster, make sure to copy not only the jar but also the `input` directory, and place it right next to the jar.
+6. "Open" the output directory.
+ 1. You can drag files into VIA as was already done above.
+ 2. You can also browse the output directory on vsp.berlin/simwrapper and analyze some of your results with interactive dashboards.
+
+----
+### Results and analysis
+
+Here are the most common ways to analyse and visualize the results (and inputs):
+
+1. [Simunto VIA](https://www.simunto.com/via/)
+2. [SimWrapper](https://www.vsp.berlin/simwrapper)
+   1. (use Google Chrome for the best experience)
+   2. Browse your local output directory or [the public matsim-kelheim data repo](https://vsp.berlin/simwrapper/public/de/kelheim)
+   3. Explore and create many interactive visualisations and dashboards
+3. Analyse the output .csv tables using the R language and [the matsim-r package](https://github.com/matsim-vsp/matsim-r) (a short sketch follows below)
+
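+For a quick scripted look at the trips output, here is a minimal R sketch along the lines of the analysis scripts in `src/main/R` (the output path below is only a placeholder and has to point to your own run directory):
+
+```r
+library(dplyr)
+library(matsim) # matsim-r, see link above
+
+# read the trips table of a finished run (adjust the path to your run directory)
+trips <- readTripsTable(pathToMATSimOutputDirectory = "output/output-kelheim-25pct")
+
+# simple modal split over all simulated trips
+modeShare <- trips %>%
+  group_by(main_mode) %>%
+  summarise(n_trips = n()) %>%
+  mutate(share = n_trips / sum(n_trips))
+
+print(modeShare)
+```
+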
+If you have questions, feel free to contact us [(VSP)](https://www.tu.berlin/vsp) any time :)
+
diff --git a/input/shp/dilutionArea.cpg b/input/shp/dilutionArea.cpg
new file mode 100644
index 00000000..3ad133c0
--- /dev/null
+++ b/input/shp/dilutionArea.cpg
@@ -0,0 +1 @@
+UTF-8
\ No newline at end of file
diff --git a/input/shp/dilutionArea.dbf b/input/shp/dilutionArea.dbf
old mode 100755
new mode 100644
index 76fd2498..c33dbde7
Binary files a/input/shp/dilutionArea.dbf and b/input/shp/dilutionArea.dbf differ
diff --git a/input/shp/dilutionArea.fix b/input/shp/dilutionArea.fix
deleted file mode 100644
index 52c6bac1..00000000
Binary files a/input/shp/dilutionArea.fix and /dev/null differ
diff --git a/input/shp/dilutionArea.prj b/input/shp/dilutionArea.prj
old mode 100755
new mode 100644
index e69de29b..bd846aeb
--- a/input/shp/dilutionArea.prj
+++ b/input/shp/dilutionArea.prj
@@ -0,0 +1 @@
+PROJCS["ETRS_1989_UTM_Zone_32N",GEOGCS["GCS_ETRS_1989",DATUM["D_ETRS_1989",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",500000.0],PARAMETER["False_Northing",0.0],PARAMETER["Central_Meridian",9.0],PARAMETER["Scale_Factor",0.9996],PARAMETER["Latitude_Of_Origin",0.0],UNIT["Meter",1.0]]
\ No newline at end of file
diff --git a/input/shp/dilutionArea.qmd b/input/shp/dilutionArea.qmd
new file mode 100644
index 00000000..8bae0ceb
--- /dev/null
+++ b/input/shp/dilutionArea.qmd
@@ -0,0 +1,27 @@
+
+
+
+
+
+ dataset
+
+
+
+
+
+
+
+
+
+
+ 0
+ 0
+
+
+
+
+ false
+
+
+
+
diff --git a/input/shp/dilutionArea.shp b/input/shp/dilutionArea.shp
old mode 100755
new mode 100644
diff --git a/input/shp/dilutionArea.shx b/input/shp/dilutionArea.shx
old mode 100755
new mode 100644
diff --git a/input/test.config.xml b/input/test.config.xml
index 3be543af..a9f7e502 100644
--- a/input/test.config.xml
+++ b/input/test.config.xml
@@ -22,26 +22,26 @@
-
-
-
-
+
+
+
+
-
-
-
-
+
+
+
+
-
-
-
+
+
+
-
-
-
-
-
+
+
+
+
+
@@ -67,79 +67,77 @@
-
-
+
+
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
-
-
-
-
-
-
+
+
+
+
-
-
-
-
-
-
+
+
+
+
-
-
-
-
-
-
+
+
+
+
-
-
-
-
-
-
+
+
+
+
+
-
-
-
-
-
-
+
+
+
+
-
-
-
+
+
+
+
+
+
+
+
+
@@ -147,11 +145,12 @@
-
+
+
-
-
+
+
@@ -160,46 +159,48 @@
+
+
+
+
+
-
-
-
+
-
-
+
-
+
-
+
-
-
+
+
-
+
-
+
-
+
@@ -208,7 +209,7 @@
-
+
@@ -217,10 +218,11 @@
-
+
+
diff --git a/input/test.with-drt.config.xml b/input/test.with-drt.config.xml
index e5d76695..d32dc581 100644
--- a/input/test.with-drt.config.xml
+++ b/input/test.with-drt.config.xml
@@ -22,10 +22,10 @@
-
-
-
-
+
+
+
+
@@ -33,12 +33,12 @@
-
+
-
-
+
+
@@ -72,72 +72,70 @@
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
-
-
-
-
-
-
+
+
+
+
-
-
-
-
-
-
+
+
+
+
-
-
-
-
-
-
+
+
+
+
-
-
-
-
-
-
+
+
+
+
+
-
-
-
-
-
-
+
+
+
+
-
-
-
+
+
+
+
+
+
+
+
+
+
@@ -146,11 +144,12 @@
-
+
+
-
-
+
+
@@ -159,55 +158,62 @@
+
+
+
+
-
-
+
+
-
-
+
+
-
+
+
+
-
+
+
-
+
-
+
-
-
+
+
-
+
-
+
-
+
@@ -216,7 +222,7 @@
-
+
@@ -225,10 +231,11 @@
-
+
+
@@ -240,44 +247,62 @@
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
@@ -305,17 +330,16 @@
-
-
-
-
-
-
-
-
-
-
-
+
+
+
+
+
+
+
+
+
+
diff --git a/input/v3.0/kelheim-v3.0-25pct.kexi.config.xml b/input/v3.0/kelheim-v3.0-25pct.kexi.config.xml
new file mode 100644
index 00000000..823714fe
--- /dev/null
+++ b/input/v3.0/kelheim-v3.0-25pct.kexi.config.xml
@@ -0,0 +1,343 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/input/v3.0/kelheim-v3.0-25pct.config.xml b/input/v3.0/kelheim-v3.0-config.xml
similarity index 68%
rename from input/v3.0/kelheim-v3.0-25pct.config.xml
rename to input/v3.0/kelheim-v3.0-config.xml
index 0841b826..4b7ba234 100644
--- a/input/v3.0/kelheim-v3.0-25pct.config.xml
+++ b/input/v3.0/kelheim-v3.0-config.xml
@@ -6,10 +6,10 @@
-
+
-
-
+
+
@@ -23,23 +23,23 @@
-
+
-
+
-
+
-
-
+
+
@@ -124,7 +124,7 @@
-
+
@@ -162,65 +162,64 @@
-
-
-
-
-
-
-
-
-
+
+
+
+
+
+
+
+
+
+
+
+
+
-
-
-
-
-
-
-
-
-
+
+
+
+
+
+
+
+
-
-
-
-
-
-
-
-
-
+
+
+
+
+
+
+
+
-
-
-
-
-
-
-
-
-
+
+
+
+
+
+
+
+
-
-
-
-
-
-
-
-
-
+
+
+
+
+
+
+
+
-
-
-
-
-
-
-
-
-
+
+
+
+
+
+
+
+
diff --git a/input/v3.0/kelheim-v3.0-vehicle-types.xml b/input/v3.0/kelheim-v3.0-vehicle-types.xml
deleted file mode 100644
index ababb547..00000000
--- a/input/v3.0/kelheim-v3.0-vehicle-types.xml
+++ /dev/null
@@ -1,27 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
\ No newline at end of file
diff --git a/pom.xml b/pom.xml
index 1394808b..132d2965 100644
--- a/pom.xml
+++ b/pom.xml
@@ -6,7 +6,7 @@
matsim-all
-		<version>16.0-PR2560</version>
+		<version>16.0-PR2750</version>
@@ -166,6 +166,12 @@
+		<dependency>
+			<groupId>org.matsim.contrib</groupId>
+			<artifactId>simwrapper</artifactId>
+			<version>${matsim.version}</version>
+		</dependency>
+
org.matsim.contrib
informed-mode-choice
@@ -200,8 +206,9 @@
3.0.0-M9
1
+ false
- @{argLine} -Xmx9500m -Djava.awt.headless=true -Dmatsim.preferLocalDtds=true
+ @{argLine} -Xmx6500m -Djava.awt.headless=true -Dmatsim.preferLocalDtds=true
diff --git a/src/main/R/drtAnalysis/av_modalShiftAnalysis.R b/src/main/R/drtAnalysis/av_modalShiftAnalysis.R
new file mode 100644
index 00000000..98c8ef0e
--- /dev/null
+++ b/src/main/R/drtAnalysis/av_modalShiftAnalysis.R
@@ -0,0 +1,94 @@
+library(dplyr)
+library(matsim)
+library(ggalluvial)
+library(ggplot2)
+
+# this is a script to compare trips / main_modes of av users in a base case to their corresponding mode in a policy case with reduced av max speed
+# some sankey plots are produced.
+
+setwd("Y:/net/ils/matsim-kelheim/kelheim-case-study/v2.0/caseStudy-badWeather/")
+
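+# naming convention: the "bad-weather-1/2/3" runs correspond to reduced av max speeds of 12/9/6 km/h, hence the 12kmh/9kmh/6kmh variable names below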
+#random seed 1111
+trips_1111_base_av <- readTripsTable(pathToMATSimOutputDirectory = "output-ASC-0.15-dist-0.00006-5_av-seed1111-CORE") %>%
+ filter(main_mode == "av")
+trips_1111_12kmh <- readTripsTable(pathToMATSimOutputDirectory = "output-ASC-0.15-dist-0.00006-5_av-seed1111-bad-weather-1-CORE")
+trips_1111_9kmh <- readTripsTable(pathToMATSimOutputDirectory = "output-ASC-0.15-dist-0.00006-5_av-seed1111-bad-weather-2-CORE")
+trips_1111_6kmh <- readTripsTable(pathToMATSimOutputDirectory = "output-ASC-0.15-dist-0.00006-5_av-seed1111-bad-weather-3-CORE")
+
+base_12kmh_1111 <- plotModalShiftSankey(trips_1111_base_av, trips_1111_12kmh, dump.output.to = "C:/Users/Simon/Desktop/wd/2023-03-28")
+base_12kmh_1111 <- base_12kmh_1111 + ggtitle("base_12kmh_1111")
+base_12kmh_1111
+base_9kmh_1111 <- plotModalShiftSankey(trips_1111_base_av, trips_1111_9kmh, dump.output.to = "C:/Users/Simon/Desktop/wd/2023-03-28")
+base_9kmh_1111 <- base_9kmh_1111 + ggtitle("base_9kmh_1111")
+base_9kmh_1111
+base_6kmh_1111 <- plotModalShiftSankey(trips_1111_base_av, trips_1111_6kmh, dump.output.to = "C:/Users/Simon/Desktop/wd/2023-03-28")
+base_6kmh_1111 <- base_6kmh_1111 + ggtitle("base_6kmh_1111")
+base_6kmh_1111
+
+#random seed 1234
+trips_1234_base_av <- readTripsTable(pathToMATSimOutputDirectory = "output-ASC-0.15-dist-0.00006-5_av-seed1234-CORE") %>%
+ filter(main_mode == "av")
+trips_1234_12kmh <- readTripsTable(pathToMATSimOutputDirectory = "output-ASC-0.15-dist-0.00006-5_av-seed1234-bad-weather-1-CORE")
+trips_1234_9kmh <- readTripsTable(pathToMATSimOutputDirectory = "output-ASC-0.15-dist-0.00006-5_av-seed1234-bad-weather-2-CORE")
+trips_1234_6kmh <- readTripsTable(pathToMATSimOutputDirectory = "output-ASC-0.15-dist-0.00006-5_av-seed1234-bad-weather-3-CORE")
+
+base_12kmh_1234 <- plotModalShiftSankey(trips_1234_base_av, trips_1234_12kmh, dump.output.to = "C:/Users/Simon/Desktop/wd/2023-03-28")
+base_12kmh_1234 <- base_12kmh_1234 + ggtitle("base_12kmh_1234")
+base_12kmh_1234
+base_9kmh_1234 <- plotModalShiftSankey(trips_1234_base_av, trips_1234_9kmh, dump.output.to = "C:/Users/Simon/Desktop/wd/2023-03-28")
+base_9kmh_1234 <- base_9kmh_1234 + ggtitle("base_9kmh_1234")
+base_9kmh_1234
+base_6kmh_1234 <- plotModalShiftSankey(trips_1234_base_av, trips_1234_6kmh, dump.output.to = "C:/Users/Simon/Desktop/wd/2023-03-28")
+base_6kmh_1234 <- base_6kmh_1234 + ggtitle("base_6kmh_1234")
+base_6kmh_1234
+
+#random seed 2222
+trips_2222_base_av <- readTripsTable(pathToMATSimOutputDirectory = "output-ASC-0.15-dist-0.00006-5_av-seed2222-CORE") %>%
+ filter(main_mode == "av")
+trips_2222_12kmh <- readTripsTable(pathToMATSimOutputDirectory = "output-ASC-0.15-dist-0.00006-5_av-seed2222-bad-weather-1-CORE")
+trips_2222_9kmh <- readTripsTable(pathToMATSimOutputDirectory = "output-ASC-0.15-dist-0.00006-5_av-seed2222-bad-weather-2-CORE")
+trips_2222_6kmh <- readTripsTable(pathToMATSimOutputDirectory = "output-ASC-0.15-dist-0.00006-5_av-seed2222-bad-weather-3-CORE")
+
+base_12kmh_2222 <- plotModalShiftSankey(trips_2222_base_av, trips_2222_12kmh, dump.output.to = "C:/Users/Simon/Desktop/wd/2023-03-28")
+base_12kmh_2222 <- base_12kmh_2222 + ggtitle("base_12kmh_2222")
+base_12kmh_2222
+base_9kmh_2222 <- plotModalShiftSankey(trips_2222_base_av, trips_2222_9kmh, dump.output.to = "C:/Users/Simon/Desktop/wd/2023-03-28")
+base_9kmh_2222 <- base_9kmh_2222 + ggtitle("base_9kmh_2222")
+base_9kmh_2222
+base_6kmh_2222 <- plotModalShiftSankey(trips_2222_base_av, trips_2222_6kmh, dump.output.to = "C:/Users/Simon/Desktop/wd/2023-03-28")
+base_6kmh_2222 <- base_6kmh_2222 + ggtitle("base_6kmh_2222")
+base_6kmh_2222
+
+#random seed 4711
+trips_4711_base_av <- readTripsTable(pathToMATSimOutputDirectory = "output-ASC-0.15-dist-0.00006-5_av-seed4711-CORE") %>%
+ filter(main_mode == "av")
+trips_4711_12kmh <- readTripsTable(pathToMATSimOutputDirectory = "output-ASC-0.15-dist-0.00006-5_av-seed4711-bad-weather-1-CORE")
+trips_4711_9kmh <- readTripsTable(pathToMATSimOutputDirectory = "output-ASC-0.15-dist-0.00006-5_av-seed4711-bad-weather-2-CORE")
+trips_4711_6kmh <- readTripsTable(pathToMATSimOutputDirectory = "output-ASC-0.15-dist-0.00006-5_av-seed4711-bad-weather-3-CORE")
+
+base_12kmh_4711 <- plotModalShiftSankey(trips_4711_base_av, trips_4711_12kmh, dump.output.to = "C:/Users/Simon/Desktop/wd/2023-03-28")
+base_12kmh_4711 <- base_12kmh_4711 + ggtitle("base_12kmh_4711")
+base_12kmh_4711
+base_9kmh_4711 <- plotModalShiftSankey(trips_4711_base_av, trips_4711_9kmh, dump.output.to = "C:/Users/Simon/Desktop/wd/2023-03-28")
+base_9kmh_4711 <- base_9kmh_4711 + ggtitle("base_9kmh_4711")
+base_9kmh_4711
+base_6kmh_4711 <- plotModalShiftSankey(trips_4711_base_av, trips_4711_6kmh, dump.output.to = "C:/Users/Simon/Desktop/wd/2023-03-28")
+base_6kmh_4711 <- base_6kmh_4711 + ggtitle("base_6kmh_4711")
+base_6kmh_4711
+
+#random seed 5678
+trips_5678_base_av <- readTripsTable(pathToMATSimOutputDirectory = "output-ASC-0.15-dist-0.00006-5_av-seed5678-CORE") %>%
+ filter(main_mode == "av")
+trips_5678_12kmh <- readTripsTable(pathToMATSimOutputDirectory = "output-ASC-0.15-dist-0.00006-5_av-seed5678-bad-weather-1-CORE")
+trips_5678_9kmh <- readTripsTable(pathToMATSimOutputDirectory = "output-ASC-0.15-dist-0.00006-5_av-seed5678-bad-weather-2-CORE")
+trips_5678_6kmh <- readTripsTable(pathToMATSimOutputDirectory = "output-ASC-0.15-dist-0.00006-5_av-seed5678-bad-weather-3-CORE")
+
+base_12kmh_5678 <- plotModalShiftSankey(trips_5678_base_av, trips_5678_12kmh, dump.output.to = "C:/Users/Simon/Desktop/wd/2023-03-28")
+base_12kmh_5678 <- base_12kmh_5678 + ggtitle("base_12kmh_5678")
+base_12kmh_5678
+base_9kmh_5678 <- plotModalShiftSankey(trips_5678_base_av, trips_5678_9kmh, dump.output.to = "C:/Users/Simon/Desktop/wd/2023-03-28")
+base_9kmh_5678 <- base_9kmh_5678 + ggtitle("base_9kmh_5678")
+base_9kmh_5678
+base_6kmh_5678 <- plotModalShiftSankey(trips_5678_base_av, trips_5678_6kmh, dump.output.to = "C:/Users/Simon/Desktop/wd/2023-03-28")
+base_6kmh_5678 <- base_6kmh_5678 + ggtitle("base_6kmh_5678")
+base_6kmh_5678
diff --git a/src/main/R/drtAnalysis/kpi_averaging.R b/src/main/R/drtAnalysis/kpi_averaging.R
new file mode 100644
index 00000000..f4997782
--- /dev/null
+++ b/src/main/R/drtAnalysis/kpi_averaging.R
@@ -0,0 +1,111 @@
+#####libraries####
+library(stringr)
+library(tidyverse)
+
+#####global variables####
+path_to_data <- "path/to/data (folder of a specific case, with different seeds)"
+stats = c("mean","median","sd" ,"max", "min")
+
+##### Collect all folder names####
+folders_list<-list.files(path_to_data,full.names = TRUE)
+folders_seeded = list()
+
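+# group the run folders by case name, i.e. by the last dash-separated token of the folder name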
+for(i in 1:length(folders_list)){
+ if (endsWith(folders_list[i],".tsv")){
+ next
+ }
+
+ case_name = tail(str_split(folders_list[i],"-")[[1]],n = 1)
+
+ if(!case_name %in% names(folders_seeded)){
+ folders_seeded[[case_name]] = folders_list[i]
+ }else{
+ folders_seeded[[case_name]] = append(folders_seeded[[case_name]],folders_list[i])
+ }
+}
+
+#########################################
+#####Reading and averaging drt tables####
+folders_drt_averaged_table = list()
+
+for(case_name in names(folders_seeded)){
+ for(folder in folders_seeded[[case_name]]){
+ files_list<- list.files(paste0(folder,"/analysis-drt-service-quality"),full.names = TRUE)
+ drt_KPI_file <- files_list[grepl(pattern = "drt_KPI.tsv",files_list)]
+ cat("processing ",drt_KPI_file," \r\n")
+ drt_KPI_table <- read.delim(drt_KPI_file)
+
+ if(!case_name %in% names(folders_drt_averaged_table)){
+ folders_drt_averaged_table[[case_name]] = drt_KPI_table
+ }else{
+ folders_drt_averaged_table[[case_name]] = rbind(folders_drt_averaged_table[[case_name]],drt_KPI_table)
+ }
+
+ }
+
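+  # build an empty result tibble with a 'stat' column plus one column per KPI,
+  # then compute each summary statistic (mean, median, sd, max, min) across all seeds of this case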
+ tbl_colnames = c("stat",colnames(folders_drt_averaged_table[[case_name]]))
+ result_tibble = tbl_colnames %>% purrr::map_dfc(setNames, object = list(numeric()))
+ for(stat in stats){
+ func = get(stat)
+ new_row = c(stat)
+ for(column in colnames(folders_drt_averaged_table[[case_name]])){
+ new_row = append(new_row,func(folders_drt_averaged_table[[case_name]][[column]]))
+ }
+
+ result_tibble = rbind(result_tibble,new_row)
+ }
+ colnames(result_tibble) = tbl_colnames
+
+ folders_drt_averaged_table[[case_name]] = result_tibble
+
+}
+
+print(folders_drt_averaged_table)
+
+#Write averaged drt tables####
+for(case_name in names(folders_drt_averaged_table)){
+ write.table(folders_drt_averaged_table[[case_name]],paste0(path_to_data, "/kpi_summary_drt_", case_name, ".tsv"),quote = FALSE,row.names = FALSE)
+}
+
+
+
+#########################################
+#####Reading and averaging av tables#####
+folders_av_averaged_table = list()
+for(case_name in names(folders_seeded)){
+ for(folder in folders_seeded[[case_name]]){
+ files_list<- list.files(paste0(folder,"/analysis-drt-service-quality"),full.names = TRUE)
+ av_KPI_file <- files_list[grepl(pattern = "av_KPI.tsv",files_list)]
+ cat("processing ",av_KPI_file," \r\n")
+ av_KPI_table <- read.delim(av_KPI_file)
+
+ if(!case_name %in% names(folders_av_averaged_table)){
+ folders_av_averaged_table[[case_name]] = av_KPI_table
+ }else{
+ folders_av_averaged_table[[case_name]] = rbind(folders_av_averaged_table[[case_name]],av_KPI_table)
+ }
+ }
+
+ tbl_colnames = c("stat",colnames(folders_av_averaged_table[[case_name]]))
+ result_tibble = tbl_colnames %>% purrr::map_dfc(setNames, object = list(numeric()))
+ for(stat in stats){
+ func = get(stat)
+ new_row = c(stat)
+ for(column in colnames(folders_av_averaged_table[[case_name]])){
+ new_row = append(new_row,func(folders_av_averaged_table[[case_name]][[column]]))
+ }
+ result_tibble = rbind(result_tibble,new_row)
+ }
+ colnames(result_tibble) = tbl_colnames
+ folders_av_averaged_table[[case_name]] = result_tibble
+}
+
+print(folders_av_averaged_table)
+
+#Write averaged av tables####
+for(case_name in names(folders_av_averaged_table)){
+ write.table(folders_av_averaged_table[[case_name]],paste0(path_to_data, "/kpi_summary_av_",case_name,".tsv"),quote = FALSE,row.names = FALSE)
+}
+
+
+
diff --git a/src/main/R/drtDemandAnalysis/VIA-data/KEXI-analysis-for-calibration-VIA.R b/src/main/R/drtDemandAnalysis/VIA-data/KEXI-analysis-for-calibration-VIA.R
index a1230442..0928d4e9 100644
--- a/src/main/R/drtDemandAnalysis/VIA-data/KEXI-analysis-for-calibration-VIA.R
+++ b/src/main/R/drtDemandAnalysis/VIA-data/KEXI-analysis-for-calibration-VIA.R
@@ -24,20 +24,24 @@ VIArides2021 <- read.csv2("VIA_Rides_202106_202201.csv", stringsAsFactors = FALS
VIArides2022_1 <- read.csv2("VIA_Rides_202201_202210.csv", stringsAsFactors = FALSE, header = TRUE, encoding = "UTF-8", na.strings="")
VIArides2022_2 <- read.csv2("VIA_Rides_202210_202212.csv", stringsAsFactors = FALSE, header = TRUE, encoding = "UTF-8", na.strings="")
VIArides2023_1 <- read.csv2("VIA_Rides_202212_202303.csv", stringsAsFactors = FALSE, header = TRUE, encoding = "UTF-8", na.strings="")
+VIArides2023_2 <- read.csv2("VIA_Rides_202304_202307.csv", stringsAsFactors = FALSE, header = TRUE, encoding = "UTF-8", na.strings="")
VIAridesAll <- union(VIArides2021, VIArides2022_1)
VIAridesAll <- union(VIAridesAll, VIArides2022_2)
-VIAridesAll <- union(VIAridesAll, VIArides2023_1) %>%
+VIAridesAll <- union(VIAridesAll, VIArides2023_1)
+VIAridesAll <- union(VIAridesAll, VIArides2023_2) %>%
filter(!is.na(Actual.Pickup.Time))
VIAridesSince2022 <- VIAridesAll %>%
filter(year(Actual.Pickup.Time) >= year(ymd("2022-01-01")))
-datasets <- list(VIArides2021, VIArides2022_1, VIArides2022_2, VIArides2023_1, VIAridesSince2022, VIAridesAll)
-names <- c("VIA_data_202106_202201","VIA_data_202201_202210","VIA_data_202210_202212","VIA_data_202212_202303","VIAdataSince2022","VIAdataAll")
+datasets <- list(VIArides2021, VIArides2022_1, VIArides2022_2, VIArides2023_1, VIArides2023_2, VIAridesSince2022, VIAridesAll)
+names <- c("VIA_data_202106_202201","VIA_data_202201_202210","VIA_data_202210_202212","VIA_data_202212_202303","VIA_data_202304_202307","VIAdataSince2022","VIAdataAll")
i <- 1
-avgValues <- setNames(data.frame(matrix(ncol = 5, nrow = 0)), c("dataset", "avgRidesPerDay", "avgDistance_<5km[m]", "avgDistance_withoutFilter[m]", "avgTravelTime[s]"))
+avgValues <- setNames(data.frame(matrix(ncol = 14, nrow = 0)), c("dataset", "avgBookingsPerDay", "avgDistance_<5km[m]", "avgDistance_withoutFilter[m]", "avgTravelTime[s]",
+ "avgBookingsPerDayInclCompanions", "noRides1Passenger", "noRides2Passengers", "noRides3Passenger", "noRides4Passenger", "noRides5Passenger",
+ "noRides6Passenger", "noRides7Passenger", "noRides8Passenger"))
for(dataset in datasets) {
print(paste0("Starting to calculate stats for dataset ",names[i]))
@@ -254,43 +258,68 @@ for(dataset in datasets) {
############################################################################################################################################################
- #calculate avg rides per day
- ridesPerDay <- ridesToConsider %>%
+  #calculate the number-of-passengers bins (how many rides had 1..8 passengers)
+  distr <- ridesToConsider %>%
+    group_by(Number.of.Passengers) %>%
+    summarize(n = n())
+
+ passengerDistribution <- data.frame(c(1,2,3,4,5,6,7,8),c(0,0,0,0,0,0,0,0))
+ colnames(passengerDistribution) <- c("Number.of.Passengers","n")
+
+  for (j in 1:8) {
+    if (any(distr$Number.of.Passengers == j)) {
+      passengerDistribution$n[j] <- distr$n[distr$Number.of.Passengers == j]
+    }
+  }
+
+ passengersPerDay <- ridesToConsider %>%
group_by(date) %>%
- tally()
+ summarise(noPassengers = sum(Number.of.Passengers))
+
+ #calculate avg bookings per day
+ dailyValues <- ridesToConsider %>%
+ group_by(date) %>%
+ summarise(noBookings = n()) %>%
+ left_join(passengersPerDay, by="date")
+
+ avgBookings <- mean(dailyValues$noBookings)
+ avgBookings
- avgRides <- mean(ridesPerDay$n)
- avgRides
+ avgBookingsInclCompanions <- mean(dailyValues$noPassengers)
+ avgBookingsInclCompanions
#save avg values into df
- avgValuesDataset <- data.frame(names[i],avgRides,avgDistance_m,avgDistance_m_withoutFilter,avgTravelTime_s)
+ avgValuesDataset <- data.frame(names[i],avgBookings,avgDistance_m,avgDistance_m_withoutFilter,avgTravelTime_s,avgBookingsInclCompanions,
+ as.integer(passengerDistribution$n[1]),as.integer(passengerDistribution$n[2]),as.integer(passengerDistribution$n[3]),as.integer(passengerDistribution$n[4]),
+ as.integer(passengerDistribution$n[5]),as.integer(passengerDistribution$n[6]),as.integer(passengerDistribution$n[7]),as.integer(passengerDistribution$n[8]))
names(avgValuesDataset) <- names(avgValues)
avgValues <- rbind(avgValues,avgValuesDataset)
- # avgValues$avgRidesPerDay <- avgRides
- # avgValues$`avgDistance[m]` <- avgDistance_m
- # avgValues$`avgDistance_withoutFilter[m]` <- avgDistance_m_withoutFilter
- # avgValues$`avgTravelTime[s]` <- avgTravelTime_s
-
- boxplot_daily_rides <- ggplot(ridesPerDay, aes(y=n)) +
+ boxplot_daily_bookings <- ggplot(dailyValues, aes(y=noBookings)) +
stat_boxplot(geom="errorbar", width=3) +
geom_boxplot(width=5) +
scale_y_continuous(n.breaks = 8) +
scale_x_discrete() +
stat_summary(fun=mean, geom="errorbar",aes(ymax=..y.., ymin=..y.., x=0),
width=5, colour="red") +
- labs(x="", y="rides", title=paste("Boxplot KEXI Rides per day for dataset", names[i])) +
+ labs(x="", y="bookings", title=paste("Boxplot KEXI bookings per day for dataset", names[i])) +
# labs(x="", y="travel distance [m]") + #for paper only
theme(plot.title = element_text(hjust=0.5, size=20, face="bold"), axis.text.y = element_text(size=24),
axis.title.y = element_text(size=25, face="bold"))
- plotFile = paste0("plots/",names[i],"/boxplot_KEXI_daily_rides.png")
+ plotFile = paste0("plots/",names[i],"/boxplot_KEXI_daily_bookings.png")
paste0("printing plot to ", plotFile)
ggsave(plotFile, limitsize = FALSE)
- #a typical day here can be seen as a day with no of rides close to the average no of rides (119)
- # typicalDays <- filter(ridesPerDay, between(n, avgRides - 3, avgRides + 3))
+ if (names[i] == "VIAdataSince2022") {
+    #a typical day here can be seen as a day with a number of bookings close to the average number of bookings (159 passengers as of 06/23)
+    # we are filtering for dataset VIAdataSince2022 as this is the agreed-on data to use to calibrate against - sm30623
+ typicalDaysBookings <- filter(dailyValues, between(noBookings, avgBookings - 5, avgBookings + 5))
+ typicalDaysPassengers <- filter(dailyValues, between(noPassengers, avgBookingsInclCompanions - 5, avgBookingsInclCompanions + 5))
+
+ write.csv2(typicalDaysPassengers, "typical_days_passenger_demand_since2022.csv", quote = FALSE, row.names = FALSE)
+ }
+
+
# #5 days are chosen as typical references
# typicalDay_jul <- ymd("2021-07-21")
@@ -303,13 +332,13 @@ for(dataset in datasets) {
#
# # this is so ugly and hard coded right now, as you have to change the day you want to plot
# #but a for loop for this just does not seem to work -sm apr22
- # typicalDayRidesPerInterval <- ridesToConsider %>%
+ # typicalDayBookingsPerInterval <- BookingsToConsider %>%
# filter(date == typicalDay_jan) %>%
# mutate (interval = floor( (minute(Actual.Pickup.Time) + hour(Actual.Pickup.Time) * 60) / 5) ) %>%
# group_by(interval) %>%
# tally()
#
- # p <- typicalDayRidesPerInterval %>%
+ # p <- typicalDayBookingsPerInterval %>%
# ggplot( aes(x=interval*5/60, y=n)) +
# ggtitle(paste("Fahrten pro 5-Minuten-Intervall (VIA): typischer Tag im ", month(typicalDay_jan, label=TRUE))) +
# geom_area(fill="#69b3a2", alpha=0.5) +
@@ -318,17 +347,16 @@ for(dataset in datasets) {
# xlab("Stunde") +
# theme_ipsum()
#
- # plotFile = paste("typicalDays/KEXI_rides_VIA_", month(typicalDay_jan, label=TRUE), ".png")
+ # plotFile = paste("typicalDays/KEXI_bookings_VIA_", month(typicalDay_jan, label=TRUE), ".png")
# paste("printing plot to ", plotFile)
# png(plotFile, width = 1200, height = 800)
# p
# dev.off()
# ggplotly(p)
- # boxplot(ridesPerDay$n, main = "Boxplot KEXI Rides per day", ylab = "rides")
- # abline(h = avgRides - 2 * sd(ridesPerDay$n), col="red",lty=2)
- # abline(h = avgRides + 2 * sd(ridesPerDay$n), col="red",lty=2)
-
+ # boxplot(dailyValues$noBookings, main = "Boxplot KEXI bookings per day", ylab = "bookings")
+ # abline(h = avgBookings - 2 * sd(dailyValues$noBookings), col="red",lty=2)
+ # abline(h = avgBookings + 2 * sd(dailyValues$noBookings), col="red",lty=2)
i <- i + 1
}
diff --git a/src/main/R/drtDemandAnalysis/VIA-data/KEXI-merge-data-VIA.R b/src/main/R/drtDemandAnalysis/VIA-data/KEXI-merge-data-VIA.R
index cc7e9a2e..2cca48d1 100644
--- a/src/main/R/drtDemandAnalysis/VIA-data/KEXI-merge-data-VIA.R
+++ b/src/main/R/drtDemandAnalysis/VIA-data/KEXI-merge-data-VIA.R
@@ -10,6 +10,7 @@ VIAdata2021 <- read.csv2("Via_data_2022-02-08/Data_request_TUB_for_Kelheim-Actua
VIAdata2022_1 <- read.csv2("Via_data_2022-10-10/Data_request_TUB_for_Kelheim-Actual_Data-VIA_Feb_to_Oct_2022_raw.csv", stringsAsFactors = FALSE, header = TRUE, encoding = "UTF-8", sep=",", skip = 1)
VIAdata2022_2 <- read.csv2("Via_data_2023-01-17/Data_request_TUB_for_Kelheim-Actual_Data-Oct-Dec_2022-Data_TUB_for_Kelheim-Actual_Data-Oct_to_Dec_22.csv", stringsAsFactors = FALSE, header = TRUE, encoding = "UTF-8", sep=",", skip = 1)
VIAdata2023_1 <- read.csv2("Via_data_2023-04-19/Data_request_TUB_for_Kelheim-Actual_Data-Jan-Mar_2023-Kelheim-Actual_Data-Jan-Mar_2023.csv", stringsAsFactors = FALSE, header = TRUE, encoding = "UTF-8", sep=",", skip = 1)
+VIAdata2023_2 <- read.csv2("Via_data_2023-07-10/Data_request_TUB_for_Kelheim-Actual_Data-Apr-Jul_2023-Kelheim-Actual_Data-Apr-Jul_23.csv", stringsAsFactors = FALSE, header = TRUE, encoding = "UTF-8", sep=",", skip = 1)
# here it makes sense to switch to column names from 2022 data and newer as
# column names for all files but the 2021 data are the same
@@ -46,14 +47,21 @@ VIAdata2023_1 <- VIAdata2023_1 %>%
Reason.For.Travel = ifelse(Reason.For.Travel != "AV","DR","AV"),
Request.Creation.Time = ymd_hms(Request.Creation.Time))
+VIAdata2023_2 <- VIAdata2023_2 %>%
+ mutate(Ride.ID = NA,
+ Reason.For.Travel = ifelse(Reason.For.Travel != "AV","DR","AV"),
+ Request.Creation.Time = ymd_hms(Request.Creation.Time))
+
write.csv2(VIAdata2021, "Via_data_2022-02-08/Data_request_TUB_for_Kelheim-Actual_Data-VIA_edited.csv", quote = FALSE, row.names = FALSE)
write.csv2(VIAdata2022_1, "Via_data_2022-10-10/Data_request_TUB_for_Kelheim-Actual_Data-VIA_Feb_to_Oct_2022_edited_cleaned.csv", quote = FALSE, row.names = FALSE)
write.csv2(VIAdata2022_2, "Via_data_2023-01-17/Data_request_TUB_for_Kelheim-Actual_Data-Oct-Dec_2022-Data_TUB_for_Kelheim-Actual_Data-Oct_to_Dec_22_edited.csv", quote = FALSE, row.names = FALSE)
write.csv2(VIAdata2023_1, "Via_data_2023-04-19/Data_request_TUB_for_Kelheim-Actual_Data-Jan-Mar_2023-Kelheim-Actual_Data-Jan-Mar_2023_edited.csv", quote = FALSE, row.names = FALSE)
+write.csv2(VIAdata2023_2, "Via_data_2023-07-10/Data_request_TUB_for_Kelheim-Actual_Data-Apr-Jul_2023-Kelheim-Actual_Data-Apr-Jul_23_edited.csv", quote = FALSE, row.names = FALSE)
allData <- union(VIAdata2021, VIAdata2022_1)
allData <- union(allData, VIAdata2022_2)
-allData <- union(allData, VIAdata2023_1) %>%
+allData <- union(allData, VIAdata2023_1)
+allData <- union(allData, VIAdata2023_2) %>%
distinct(Request.ID, .keep_all = TRUE)
#filter
@@ -72,6 +80,9 @@ completedRides2022_2 <- VIAdata2022_2 %>%
completedRides2023_1 <- VIAdata2023_1 %>%
filter(Request.Status == "Completed")
+completedRides2023_2 <- VIAdata2023_2 %>%
+ filter(Request.Status == "Completed")
+
saturday_rides <- completedRides %>%
mutate(Actual.Pickup.Time = ymd_hms(Actual.Pickup.Time)) %>%
mutate(weekday = wday(Actual.Pickup.Time, label = TRUE)) %>%
@@ -97,9 +108,14 @@ saturday_rides2023_1 <- completedRides2023_1 %>%
mutate(weekday = wday(Actual.Pickup.Time, label = TRUE)) %>%
filter(weekday == "Sa")
+saturday_rides2023_2 <- completedRides2023_2 %>%
+ mutate(Actual.Pickup.Time = ymd_hms(Actual.Pickup.Time)) %>%
+ mutate(weekday = wday(Actual.Pickup.Time, label = TRUE)) %>%
+ filter(weekday == "Sa")
+
#dump output
-write.csv2(completedRides, "VIA_Rides_202106_202210.csv", quote = FALSE, row.names = FALSE)
-write.csv2(saturday_rides, "VIA_Rides_Saturdays_202106_202210.csv", quote = FALSE, row.names = FALSE)
+write.csv2(completedRides, "VIA_Rides_202106_202303.csv", quote = FALSE, row.names = FALSE)
+write.csv2(saturday_rides, "VIA_Rides_Saturdays_202106_202303.csv", quote = FALSE, row.names = FALSE)
write.csv2(completedRides2021, "VIA_Rides_202106_202201.csv", quote = FALSE, row.names = FALSE)
write.csv2(saturday_rides2021, "VIA_Rides_Saturdays_202106_202201.csv", quote = FALSE, row.names = FALSE)
write.csv2(completedRides2022_1, "VIA_Rides_202201_202210.csv", quote = FALSE, row.names = FALSE)
@@ -108,4 +124,6 @@ write.csv2(completedRides2022_2, "VIA_Rides_202210_202212.csv", quote = FALSE, r
write.csv2(saturday_rides2022_2, "VIA_Rides_Saturdays_202210_202212.csv", quote = FALSE, row.names = FALSE)
write.csv2(completedRides2023_1, "VIA_Rides_202212_202303.csv", quote = FALSE, row.names = FALSE)
write.csv2(saturday_rides2023_1, "VIA_Rides_Saturdays_202212_202303.csv", quote = FALSE, row.names = FALSE)
+write.csv2(completedRides2023_2, "VIA_Rides_202304_202307.csv", quote = FALSE, row.names = FALSE)
+write.csv2(saturday_rides2023_2, "VIA_Rides_Saturdays_202304_202307.csv", quote = FALSE, row.names = FALSE)
diff --git a/src/main/R/drtDemandAnalysis/VIA-data/KEXI-plot-time-series-VIA.R b/src/main/R/drtDemandAnalysis/VIA-data/KEXI-plot-time-series-VIA.R
index 2d875fea..f36ad1ea 100644
--- a/src/main/R/drtDemandAnalysis/VIA-data/KEXI-plot-time-series-VIA.R
+++ b/src/main/R/drtDemandAnalysis/VIA-data/KEXI-plot-time-series-VIA.R
@@ -17,18 +17,20 @@ VIAdata2021 <- read.csv2("Via_data_2022-02-08/Data_request_TUB_for_Kelheim-Actua
VIAdata2022_1 <- read.csv2("Via_data_2022-10-10/Data_request_TUB_for_Kelheim-Actual_Data-VIA_Feb_to_Oct_2022_edited_cleaned.csv", stringsAsFactors = FALSE, header = TRUE, encoding = "UTF-8", na.strings="")
VIAdata2022_2 <- read.csv2("Via_data_2023-01-17/Data_request_TUB_for_Kelheim-Actual_Data-Oct-Dec_2022-Data_TUB_for_Kelheim-Actual_Data-Oct_to_Dec_22_edited.csv", stringsAsFactors = FALSE, header = TRUE, encoding = "UTF-8", na.strings="")
VIAdata2023_1 <- read.csv2("Via_data_2023-04-19/Data_request_TUB_for_Kelheim-Actual_Data-Jan-Mar_2023-Kelheim-Actual_Data-Jan-Mar_2023_edited.csv", stringsAsFactors = FALSE, header = TRUE, encoding = "UTF-8", na.strings="")
+VIAdata2023_2 <- read.csv2("Via_data_2023-07-10/Data_request_TUB_for_Kelheim-Actual_Data-Apr-Jul_2023-Kelheim-Actual_Data-Apr-Jul_23_edited.csv", stringsAsFactors = FALSE, header = TRUE, encoding = "UTF-8", na.strings="")
VIAdataAll <- union(VIAdata2021, VIAdata2022_1)
VIAdataAll <- union(VIAdataAll, VIAdata2022_2)
-VIAdataAll <- union(VIAdataAll, VIAdata2023_1) %>%
+VIAdataAll <- union(VIAdataAll, VIAdata2023_1)
+VIAdataAll <- union(VIAdataAll, VIAdata2023_2) %>%
distinct(Request.ID, .keep_all = TRUE)
VIAdataSince2022 <- VIAdataAll %>%
filter(year(Actual.Pickup.Time) >= year(ymd("2022-01-01")))
-datasets <- list(VIAdata2021, VIAdata2022_1, VIAdata2022_2, VIAdata2023_1, VIAdataSince2022, VIAdataAll)
-names <- c("VIA_data_202106_202201","VIA_data_202201_202210","VIA_data_202210_202212","VIA_data_202212_202303","VIAdataSince2022","VIAdataAll")
+datasets <- list(VIAdata2021, VIAdata2022_1, VIAdata2022_2, VIAdata2023_1, VIAdata2023_2, VIAdataSince2022, VIAdataAll)
+names <- c("VIA_data_202106_202201","VIA_data_202201_202210","VIA_data_202210_202212","VIA_data_202212_202303","VIA_data_202304_202307","VIAdataSince2022","VIAdataAll")
i <- 1
print("Starting to print different plots!")
@@ -225,7 +227,7 @@ for(dataset in datasets) {
ridesPerIntervals <- ridesPerInterval %>%
mutate(interval5 = format(round(interval*5/60, 2), nsmall = 2))
class.df <- data.frame(ridesPerIntervals$interval5,ridesPerIntervals$n, stringsAsFactors = FALSE)
- write.csv2(class.df,paste0("KEXI_",names[i],"_rides_daily_VIA.csv"),quote=FALSE,row.names=FALSE)
+ # write.csv2(class.df,paste0("KEXI_",names[i],"_rides_daily_VIA.csv"),quote=FALSE,row.names=FALSE)
p <- ggplot(data=ridesPerInterval) +
geom_line(mapping=aes(x=interval*5/60, y=n), col="#69b3a2") +
diff --git a/src/main/R/drtDemandAnalysis/VIA-data/real-demand-extractor.R b/src/main/R/drtDemandAnalysis/VIA-data/real-demand-extractor.R
new file mode 100644
index 00000000..d61507ec
--- /dev/null
+++ b/src/main/R/drtDemandAnalysis/VIA-data/real-demand-extractor.R
@@ -0,0 +1,58 @@
+library(tidyverse)
+library(lubridate)
+
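+# extract the real KEXI demand (requests) from the merged VIA rides export and write one csv per selected day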
+raw_data <- read.csv("VIA_Rides_202106_202303.csv", sep=";") %>%
+ filter(Actual.Pickup.Time != "") %>%
+ separate(Requested.Pickup.Time, into = c("requested_departure_date", "requested_departure_time"), sep = " ") %>%
+ separate(Actual.Pickup.Time, into = c("actual_departure_date", "actual_departure_time"), sep = " ") %>%
+ separate(Request.Creation.Time, into = c("request_generation_date", "request_generation_time"), sep = " ")
+
+processed_data <- tibble(
+ requested_date = c(raw_data$requested_departure_date),
+ requested_time = c(raw_data$requested_departure_time),
+ number_of_passengers = c(raw_data$Number.of.Passengers),
+ from_x = c(raw_data$Origin.Lng),
+ from_y = c(raw_data$Origin.Lat),
+ to_x = c(raw_data$Destination.Lng),
+ to_y = c(raw_data$Destination.Lat),
+ actual_departure_time = c(raw_data$actual_departure_time)
+ ) %>%
+ mutate(time_in_seconds = period_to_seconds(hms(requested_time))) %>%
+ mutate(delay_in_departure = period_to_seconds(hms(actual_departure_time)) - period_to_seconds(hms(requested_time))) %>%
+ relocate(time_in_seconds, .before = from_x) %>%
+ filter(!is.na(requested_time))
+  #TODO here, we filter out the entries that only have a requested arrival time (there are not many of them)
+
+
+# daily_data <- processed_data %>%
+# filter(requested_date == "26.11.2020")
+# ggplot(data = daily_data) +
+# geom_histogram(mapping = aes(x=delay_in_departure), binwidth=60)
+# scale_x_continuous(limits=c(-3600,3600))
+
+# 2022-02-17 Thursday
+output_data_20220217 <- processed_data %>%
+ filter(requested_date == "2022-02-17")
+write_csv(output_data_20220217, "Kelheim/extracted-daily-demands/2022-02-17-demand.csv")
+
+# 2022-04-05 Tuesday
+output_data_20220405 <- processed_data %>%
+ filter(requested_date == "2022-04-05")
+write_csv(output_data_20220405, "Kelheim/extracted-daily-demands/2022-04-05-demand.csv")
+
+# 2022-05-17 Tuesday
+output_data_20220517 <- processed_data %>%
+ filter(requested_date == "2022-05-17")
+write_csv(output_data_20220517, "Kelheim/extracted-daily-demands/2022-05-17-demand.csv")
+
+
+#2022-09-28 Wednesday
+output_data_20220928 <- processed_data %>%
+ filter(requested_date == "2022-09-28")
+write_csv(output_data_20220928, "Kelheim/extracted-daily-demands/2022-09-28-demand.csv")
+
+#2023-01-19 Thursday
+output_data_20230119 <- processed_data %>%
+ filter(requested_date == "2023-01-19")
+write_csv(output_data_20230119, "Kelheim/extracted-daily-demands/2023-01-19-demand.csv")
+
diff --git a/src/main/R/mid.csv b/src/main/R/mid.csv
deleted file mode 100644
index 12275451..00000000
--- a/src/main/R/mid.csv
+++ /dev/null
@@ -1,31 +0,0 @@
-dist_group,mode,share
-0 - 1000,car,0.0557
-1000 - 2000,car,0.0758
-2000 - 5000,car,0.1507
-5000 - 10000,car,0.1139
-10000 - 20000,car,0.098
-20000+,car,0.0935
-0 - 1000,walk,0.0804
-1000 - 2000,walk,0.0318
-2000 - 5000,walk,0.0165
-5000 - 10000,walk,0.0044
-10000 - 20000,walk,0.0009
-20000+,walk,0.0006
-0 - 1000,pt,0.001
-1000 - 2000,pt,0.0012
-2000 - 5000,pt,0.0086
-5000 - 10000,pt,0.007
-10000 - 20000,pt,0.0061
-20000+,pt,0.0101
-0 - 1000,bike,0.0247
-1000 - 2000,bike,0.0242
-2000 - 5000,bike,0.0235
-5000 - 10000,bike,0.0063
-10000 - 20000,bike,0.0025
-20000+,bike,0.0019
-0 - 1000,ride,0.0144
-1000 - 2000,ride,0.0212
-2000 - 5000,ride,0.0447
-5000 - 10000,ride,0.0316
-10000 - 20000,ride,0.0257
-20000+,ride,0.0265
diff --git a/src/main/R/mid_adj.csv b/src/main/R/mid_adj.csv
deleted file mode 100644
index ddfca9f3..00000000
--- a/src/main/R/mid_adj.csv
+++ /dev/null
@@ -1,31 +0,0 @@
-dist_group,mode,share
-0 - 1000,car,0.04917652958739915
-1000 - 2000,car,0.04956785176611809
-2000 - 5000,car,0.10849424221801696
-5000 - 10000,car,0.15076819523354637
-10000 - 20000,car,0.14211152124057438
-20000+,car,0.11194215806986973
-0 - 1000,walk,0.07098371595739482
-1000 - 2000,walk,0.0207949562818279
-2000 - 5000,walk,0.011878931629709887
-5000 - 10000,walk,0.005824232300505742
-10000 - 20000,walk,0.0013051058073113972
-20000+,walk,0.0007183453993788432
-0 - 1000,pt,0.00088288203927108
-1000 - 2000,pt,0.000784715331389732
-2000 - 5000,pt,0.00619144315245485
-5000 - 10000,pt,0.009265824114440954
-10000 - 20000,pt,0.008845717138443916
-20000+,pt,0.012092147556210528
-0 - 1000,bike,0.021807186369995676
-1000 - 2000,bike,0.0158250925163596
-2000 - 5000,bike,0.01691847838170802
-5000 - 10000,bike,0.00833924170299686
-10000 - 20000,bike,0.0036252939091983255
-20000+,bike,0.002274760431366337
-0 - 1000,ride,0.012713501365503551
-1000 - 2000,ride,0.013863304187885268
-2000 - 5000,ride,0.03218110568775951
-5000 - 10000,ride,0.04182857743090488
-10000 - 20000,ride,0.03726802138655879
-20000+,ride,0.03172692180589891
diff --git a/src/main/R/sim.csv b/src/main/R/sim.csv
new file mode 100644
index 00000000..22dd8fa5
--- /dev/null
+++ b/src/main/R/sim.csv
@@ -0,0 +1,31 @@
+dist_group,main_mode,trips,mode,scaled_trips,source
+0 - 1000,bike,98,bike,392,sim
+0 - 1000,car,4605,car,18420,sim
+0 - 1000,pt,23,pt,92,sim
+0 - 1000,ride,315,ride,1260,sim
+0 - 1000,walk,8705,walk,34820,sim
+1000 - 2000,bike,977,bike,3908,sim
+1000 - 2000,car,5422,car,21688,sim
+1000 - 2000,pt,134,pt,536,sim
+1000 - 2000,ride,1849,ride,7396,sim
+1000 - 2000,walk,1415,walk,5660,sim
+2000 - 5000,bike,2610,bike,10440,sim
+2000 - 5000,car,11878,car,47512,sim
+2000 - 5000,pt,590,pt,2360,sim
+2000 - 5000,ride,5606,ride,22424,sim
+2000 - 5000,walk,453,walk,1812,sim
+5000 - 10000,bike,1851,bike,7404,sim
+5000 - 10000,car,15732,car,62928,sim
+5000 - 10000,pt,1050,pt,4200,sim
+5000 - 10000,ride,5632,ride,22528,sim
+5000 - 10000,walk,386,walk,1544,sim
+10000 - 20000,bike,1156,bike,4624,sim
+10000 - 20000,car,14101,car,56404,sim
+10000 - 20000,pt,975,pt,3900,sim
+10000 - 20000,ride,3144,ride,12576,sim
+10000 - 20000,walk,287,walk,1148,sim
+20000+,bike,466,bike,1864,sim
+20000+,car,9947,car,39788,sim
+20000+,pt,1128,pt,4512,sim
+20000+,ride,634,ride,2536,sim
+20000+,walk,136,walk,544,sim
diff --git a/src/main/R/srv.R b/src/main/R/srv.R
index 90d386df..9ff94757 100644
--- a/src/main/R/srv.R
+++ b/src/main/R/srv.R
@@ -8,24 +8,30 @@ library(sf)
source("https://raw.githubusercontent.com/matsim-scenarios/matsim-duesseldorf/master/src/main/R/theme.R")
-setwd("PLEASE ADJUST TO YOUR LOCAL DIRECTORY FOR matsim-kelheim/src/main/R")
+setwd("D:/git/matsim-kelheim/src/main/R")
theme_set(theme_Publication(18))
# trip distance groups
-levels = c("0 - 1000", "1000 - 2000", "2000 - 5000", "5000 - 10000", "10000 - 20000", "20000+")
-breaks = c(0, 1000, 2000, 5000, 10000, 20000, Inf)
+levels <- c("0 - 1000", "1000 - 2000", "2000 - 5000", "5000 - 10000", "10000 - 20000", "20000+")
+breaks <- c(0, 1000, 2000, 5000, 10000, 20000, Inf)
-shape <- st_read("../../../scenarios/input/shp/dilutionArea.shp", crs=25832)
+shape <- st_read("../../../input/shp/dilutionArea.shp", crs=25832)
#########
# Read simulation data
#########
sim_scale <- 4 # set to 4 for 25pct, 10 for 10pct, 100 for 1pct, ...
-f <- "../../../output/output-kelheim-25pct/" # set to run output directory
+#f <- "../../../output/output-kelheim-25pct/" # set to run output directory
-homes <- read_csv("../../../scenarios/input/kelheim-v3.0-homes.csv",
+
+f <- "//sshfs.r/schlenther@cluster.math.tu-berlin.de/net/ils/matsim-kelheim/calibration-v3.0-noMgnUtl/runs/014-cnt/" # set to run output directory
+f <- "//sshfs.r/schlenther@cluster.math.tu-berlin.de/net/ils/matsim-kelheim/calibration-ride12/calibration-bike-3-mc/runs/009/" # set to run output directory
+
+
+
+homes <- read_csv("https://svn.vsp.tu-berlin.de/repos/public-svn/matsim/scenarios/countries/de/kelheim/kelheim-v3.0/input/kelheim-v3.0-homes.csv",
col_types = cols(
person = col_character()
))
@@ -57,12 +63,14 @@ sim <- trips %>%
mutate(scaled_trips=sim_scale * trips) %>%
mutate(source = "sim")
+# Use this to write file needed to do adjustment
+#write_csv(sim, "sim.csv")
+
########
# Read survey data
########
-srv <- read_csv("mid_adj.csv") %>%
- mutate(main_mode=mode) %>%
+srv <- read_csv("../resources/kelheim_mode_share.csv") %>%
mutate(scaled_trips=122258 * 3.2 * share) %>%
mutate(source = "mid") %>%
mutate(dist_group=fct_relevel(dist_group, levels)) %>%
@@ -73,9 +81,9 @@ srv <- read_csv("mid_adj.csv") %>%
#######
srv_aggr <- srv %>%
- group_by(mode) %>%
+ group_by(main_mode) %>%
summarise(share=sum(share)) %>% # assume shares sum to 1
- mutate(mode=fct_relevel(mode, "walk", "bike", "pt", "ride", "car"))
+ mutate(mode=fct_relevel(main_mode, "walk", "bike", "pt", "ride", "car"))
aggr <- sim %>%
group_by(mode) %>%
@@ -105,7 +113,7 @@ combined + plot_layout(guides = "collect")
g <- arrangeGrob(p1_aggr, p2_aggr, ncol = 2)
g
-out <- file.path(f, "analysis-mode-choice")
+out <- file.path(f, "R-analysis-mode-choice")
if(!file.exists(out)){
print("creating analysis sub-directory")
dir.create(out)
@@ -118,6 +126,9 @@ ggsave(filename = "modal-split.png", path = out, g,
# Combined plot by distance
##########
+srv <- srv %>%
+ mutate(mode = main_mode)
+
total <- bind_rows(srv, sim) %>%
mutate(mode=fct_relevel(mode, "walk", "bike", "pt", "ride", "car"))
@@ -149,23 +160,22 @@ sim_aggr <- sim %>%
# Needed share of trips
tripShare <- 0.19
shortDistance <- sum(filter(sim, dist_group=="0 - 1000")$trips)
-numTrips = (shortDistance - sim_sum * tripShare) / (tripShare - 1)
+numTrips <- (shortDistance - sim_sum * tripShare) / (tripShare - 1)
##########################
# Distance distributions based on RegioStar data
##########################
-levels = c("0 - 500", "500 - 1000", "1000 - 2000", "2000 - 5000", "5000 - 10000",
- "10000 - 20000", "20000 - 50000", "50000 - 100000", "100000+")
-
-breaks = c(0, 500, 1000, 2000, 5000, 10000, 20000, 50000, 100000, Inf)
+levels <- c("0 - 1000", "1000 - 2000", "2000 - 5000", "5000 - 10000", "10000 - 20000", "20000+")
+breaks <- c(0, 1000, 2000, 5000, 10000, 20000, Inf)
trips2 <- trips %>%
mutate(dist_group = cut(traveled_distance, breaks=breaks, labels=levels, right = F)) %>%
mutate(mode = fct_relevel(main_mode, "walk", "bike", "pt", "ride", "car"))
-rs <- read_csv("tidied-mode-share-per-distance.csv") %>%
+rs <- read_csv("../resources/kelheim_mode_share_per_dist.csv") %>%
+ mutate(mode = main_mode) %>%
mutate(source="rs")
sim <- trips2 %>%
@@ -178,7 +188,7 @@ sim <- mutate(sim, share=trips/sum(sim$trips))
total_distance_dist <- bind_rows(filter(rs, mode=="total_distance_distribution"), sim)
dist_order <- factor(total_distance_dist$dist_group, level = levels)
-dist_order <- fct_explicit_na(dist_order, "100000+")
+dist_order <- fct_explicit_na(dist_order, "20000+")
g <- ggplot(total_distance_dist, aes(y=share, x=source, fill=source)) +
@@ -205,7 +215,7 @@ by_distance <- bind_rows(filter(rs, mode!="total_distance_distribution"), sim) %
mutate(mode=fct_relevel(mode, "walk", "bike", "pt", "ride", "car"))
dist_order <- factor(by_distance$dist_group, level = levels)
-dist_order <- fct_explicit_na(dist_order, "100000+")
+dist_order <- fct_explicit_na(dist_order, "20000+")
g <- ggplot(by_distance, aes(y=share, x=source, fill=mode)) +
labs(subtitle = paste("Kelheim scenario", substring(f, 52)), x="distance [m]", y="share") +
@@ -218,3 +228,4 @@ g
ggsave(filename = "modal-distance-distribution-relative.png", path = out, g,
width = 12, height = 10, device='png', dpi=300)
+
diff --git a/src/main/java/org/matsim/analysis/postAnalysis/drt/DrtServiceQualityAnalysis.java b/src/main/java/org/matsim/analysis/postAnalysis/drt/DrtServiceQualityAnalysis.java
index a0de17b6..db49ee7f 100644
--- a/src/main/java/org/matsim/analysis/postAnalysis/drt/DrtServiceQualityAnalysis.java
+++ b/src/main/java/org/matsim/analysis/postAnalysis/drt/DrtServiceQualityAnalysis.java
@@ -17,6 +17,7 @@
import org.matsim.api.core.v01.network.Network;
import org.matsim.application.MATSimAppCommand;
import org.matsim.contrib.common.util.DistanceUtils;
+import org.matsim.contrib.drt.extension.DrtWithExtensionsConfigGroup;
import org.matsim.contrib.drt.run.DrtConfigGroup;
import org.matsim.contrib.drt.run.MultiModeDrtConfigGroup;
import org.matsim.core.config.Config;
@@ -78,7 +79,7 @@ public Integer call() throws Exception {
Files.createDirectory(outputFolder);
}
- Config config = ConfigUtils.loadConfig(configPath.toString());
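+		// load the config with the extensions-aware drt config group, so that additional drt parameter sets (e.g. drt companions) can be parsed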
+ Config config = ConfigUtils.loadConfig(configPath.toString(), new MultiModeDrtConfigGroup(DrtWithExtensionsConfigGroup::new));
int lastIteration = config.controler().getLastIteration();
String runId = config.controler().getRunId();
Path folderOfLastIteration = Path.of(directory.toString() + "/ITERS/it." + lastIteration);
diff --git a/src/main/java/org/matsim/analysis/preAnalysis/ModeShareAnalysis.java b/src/main/java/org/matsim/analysis/preAnalysis/ModeShareAnalysis.java
index 02f0e3c6..fa5d9b62 100644
--- a/src/main/java/org/matsim/analysis/preAnalysis/ModeShareAnalysis.java
+++ b/src/main/java/org/matsim/analysis/preAnalysis/ModeShareAnalysis.java
@@ -12,8 +12,8 @@
import org.matsim.api.core.v01.population.Person;
import org.matsim.api.core.v01.population.Population;
import org.matsim.application.MATSimAppCommand;
-import org.matsim.application.analysis.DefaultAnalysisMainModeIdentifier;
import org.matsim.core.population.PopulationUtils;
+import org.matsim.core.router.DefaultAnalysisMainModeIdentifier;
import org.matsim.core.router.MainModeIdentifier;
import org.matsim.core.router.TripStructureUtils;
import org.matsim.core.utils.geometry.CoordUtils;
diff --git a/src/main/java/org/matsim/dashboards/KelheimDashboardProvider.java b/src/main/java/org/matsim/dashboards/KelheimDashboardProvider.java
new file mode 100644
index 00000000..40a8de49
--- /dev/null
+++ b/src/main/java/org/matsim/dashboards/KelheimDashboardProvider.java
@@ -0,0 +1,32 @@
+package org.matsim.dashboards;
+
+import org.matsim.core.config.Config;
+import org.matsim.core.utils.io.IOUtils;
+import org.matsim.run.RunKelheimScenario;
+import org.matsim.simwrapper.Dashboard;
+import org.matsim.simwrapper.DashboardProvider;
+import org.matsim.simwrapper.SimWrapper;
+import org.matsim.simwrapper.dashboard.TravelTimeComparisonDashboard;
+import org.matsim.simwrapper.dashboard.TripDashboard;
+
+import java.util.List;
+
+/**
+ * Provider for default dashboards in the scenario.
+ * Declared in META-INF/services
+ */
+public class KelheimDashboardProvider implements DashboardProvider {
+
+ @Override
+	public List<Dashboard> getDashboards(Config config, SimWrapper simWrapper) {
+
+ TripDashboard trips = new TripDashboard("kelheim_mode_share.csv", "kelheim_mode_share_per_dist.csv", null);
+
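+		// use the same distance groups as the Kelheim reference data (the last group is open-ended, i.e. 20000+)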
+ trips.setAnalysisArgs("--dist-groups", "0,1000,2000,5000,10000,20000");
+ return List.of(
+ trips,
+ new TravelTimeComparisonDashboard(IOUtils.resolveFileOrResource( "kelheim-v" + RunKelheimScenario.VERSION + "-routes-ref.csv.gz").toString())
+ );
+ }
+
+}
diff --git a/src/main/java/org/matsim/run/RunKelheimScenario.java b/src/main/java/org/matsim/run/RunKelheimScenario.java
index 795ff5e5..b4995552 100644
--- a/src/main/java/org/matsim/run/RunKelheimScenario.java
+++ b/src/main/java/org/matsim/run/RunKelheimScenario.java
@@ -21,7 +21,6 @@
import org.matsim.application.MATSimApplication;
import org.matsim.application.analysis.CheckPopulation;
import org.matsim.application.analysis.traffic.LinkStats;
-import org.matsim.application.analysis.travelTimeValidation.TravelTimeAnalysis;
import org.matsim.application.options.SampleOptions;
import org.matsim.application.prepare.CreateLandUseShp;
import org.matsim.application.prepare.freight.tripExtraction.ExtractRelevantFreightTrips;
@@ -45,7 +44,6 @@
import org.matsim.core.api.experimental.events.EventsManager;
import org.matsim.core.config.Config;
import org.matsim.core.config.ConfigUtils;
-import org.matsim.core.config.groups.PlanCalcScoreConfigGroup;
import org.matsim.core.config.groups.PlansCalcRouteConfigGroup;
import org.matsim.core.config.groups.VspExperimentalConfigGroup;
import org.matsim.core.controler.AbstractModule;
@@ -54,100 +52,100 @@
import org.matsim.core.scoring.functions.ScoringParametersForPerson;
import org.matsim.drtFare.KelheimDrtFareModule;
import org.matsim.extensions.pt.routing.ptRoutingModes.PtIntermodalRoutingModesConfigGroup;
-import org.matsim.modechoice.ModeOptions;
-import org.matsim.modechoice.commands.StrategyOptions;
-import org.matsim.modechoice.estimators.DefaultActivityEstimator;
-import org.matsim.modechoice.estimators.DefaultLegScoreEstimator;
-import org.matsim.modechoice.estimators.FixedCostsEstimator;
-import org.matsim.modechoice.pruning.DistanceBasedPruner;
import org.matsim.run.prepare.PrepareNetwork;
import org.matsim.run.prepare.PreparePopulation;
-import org.matsim.run.utils.KelheimCaseStudyTool;
+import org.matsim.simwrapper.SimWrapperConfigGroup;
+import org.matsim.simwrapper.SimWrapperModule;
import org.matsim.vehicles.VehicleType;
import picocli.CommandLine;
import playground.vsp.pt.fare.DistanceBasedPtFareParams;
import playground.vsp.pt.fare.PtFareConfigGroup;
-import playground.vsp.pt.fare.PtTripWithDistanceBasedFareEstimator;
import playground.vsp.scoring.IncomeDependentUtilityOfMoneyPersonScoringParameters;
import javax.annotation.Nullable;
-import java.util.*;
+import java.util.List;
+import java.util.Set;
+import java.util.SplittableRandom;
@CommandLine.Command(header = ":: Open Kelheim Scenario ::", version = RunKelheimScenario.VERSION, mixinStandardHelpOptions = true)
@MATSimApplication.Prepare({
- CreateNetworkFromSumo.class, CreateTransitScheduleFromGtfs.class, TrajectoryToPlans.class, GenerateShortDistanceTrips.class,
- MergePopulations.class, ExtractRelevantFreightTrips.class, DownSamplePopulation.class, PrepareNetwork.class, ExtractHomeCoordinates.class,
- CreateLandUseShp.class, ResolveGridCoordinates.class, PreparePopulation.class, CleanPopulation.class, FixSubtourModes.class, SplitActivityTypesDuration.class
+ CreateNetworkFromSumo.class, CreateTransitScheduleFromGtfs.class, TrajectoryToPlans.class, GenerateShortDistanceTrips.class,
+ MergePopulations.class, ExtractRelevantFreightTrips.class, DownSamplePopulation.class, PrepareNetwork.class, ExtractHomeCoordinates.class,
+ CreateLandUseShp.class, ResolveGridCoordinates.class, PreparePopulation.class, CleanPopulation.class, FixSubtourModes.class, SplitActivityTypesDuration.class
})
@MATSimApplication.Analysis({
- TravelTimeAnalysis.class, LinkStats.class, CheckPopulation.class, DrtServiceQualityAnalysis.class, DrtVehiclesRoadUsageAnalysis.class
+ LinkStats.class, CheckPopulation.class, DrtServiceQualityAnalysis.class, DrtVehiclesRoadUsageAnalysis.class
})
public class RunKelheimScenario extends MATSimApplication {
- private static final double WEIGHT_1_PASSENGER = 26387.;
- private static final double WEIGHT_2_PASSENGER = 3843.;
- private static final double WEIGHT_3_PASSENGER = 879.;
- private static final double WEIGHT_4_PASSENGER = 409.;
- private static final double WEIGHT_5_PASSENGER = 68.;
- private static final double WEIGHT_6_PASSENGER = 18.;
- private static final double WEIGHT_7_PASSENGER = 4.;
- private static final double WEIGHT_8_PASSENGER = 1.;
-
- static final String VERSION = "3.0";
-
+ public static final String VERSION = "3.0";
+ private static final double WEIGHT_1_PASSENGER = 16517.;
+ private static final double WEIGHT_2_PASSENGER = 2084.;
+ private static final double WEIGHT_3_PASSENGER = 532.;
+ private static final double WEIGHT_4_PASSENGER = 163.;
+ private static final double WEIGHT_5_PASSENGER = 20.;
+ private static final double WEIGHT_6_PASSENGER = 5.;
+ private static final double WEIGHT_7_PASSENGER = 0.;
+ private static final double WEIGHT_8_PASSENGER = 0.;
@CommandLine.Mixin
private final SampleOptions sample = new SampleOptions(25, 10, 1);
@CommandLine.Option(names = "--with-drt", defaultValue = "false", description = "enable DRT service")
private boolean drt;
- @CommandLine.Option(names = "--income-dependent", defaultValue = "true", description = "enable income dependent monetary utility", negatable = true)
- private boolean incomeDependent;
+ // a couple of the CommandLine.Options below are not strictly necessary; they merely allow settings to be adjusted without having to change the config file and/or config options directly (ts 07/23)
- @CommandLine.Option(names = "--av-fare", defaultValue = "2.0", description = "AV fare (euro per trip)")
+ /**
+ * The KEXI service has a zone-dependent fare system, which is why we are using a custom fare implementation. Via this option, one can set a flat (constant) price for the AV service.
+ */
+ @CommandLine.Option(names = "--av-fare", defaultValue = "0.0", description = "AV fare (euro per trip)")
private double avFare;
- @CommandLine.Option(names = "--case-study", defaultValue = "NULL", description = "Case study for the av scenario")
- private KelheimCaseStudyTool.AvServiceArea avServiceArea;
-
@CommandLine.Option(names = "--bike-rnd", defaultValue = "false", description = "enable randomness in ASC of bike")
private boolean bikeRnd;
@CommandLine.Option(names = "--random-seed", defaultValue = "4711", description = "setting random seed for the simulation")
private long randomSeed;
- @CommandLine.Option(names = "--intermodal", defaultValue = "false", description = "enable DRT service")
+ @CommandLine.Option(names = "--intermodal", defaultValue = "false", description = "enable intermodality for DRT service")
private boolean intermodal;
@CommandLine.Option(names = "--plans", defaultValue = "", description = "Use different input plans")
private String planOrigin;
- @CommandLine.Mixin
- private StrategyOptions strategy = new StrategyOptions(StrategyOptions.ModeChoice.subTourModeChoice, "person");
-
public RunKelheimScenario(@Nullable Config config) {
super(config);
}
public RunKelheimScenario() {
- super(String.format("input/v%s/kelheim-v%s-25pct.config.xml", VERSION, VERSION));
+ super(String.format("input/v%s/kelheim-v%s-config.xml", VERSION, VERSION));
}
public static void main(String[] args) {
MATSimApplication.run(RunKelheimScenario.class, args);
}
+ public static void addDrtCompanionParameters(DrtWithExtensionsConfigGroup drtWithExtensionsConfigGroup) {
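+ // sampling weights for drt rides with 1 to 8 passengers; the drt companions extension uses them to add accompanying passengers to booked drt trips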
+ DrtCompanionParams drtCompanionParams = new DrtCompanionParams();
+ drtCompanionParams.setDrtCompanionSamplingWeights(List.of(
+ WEIGHT_1_PASSENGER,
+ WEIGHT_2_PASSENGER,
+ WEIGHT_3_PASSENGER,
+ WEIGHT_4_PASSENGER,
+ WEIGHT_5_PASSENGER,
+ WEIGHT_6_PASSENGER,
+ WEIGHT_7_PASSENGER,
+ WEIGHT_8_PASSENGER
+ ));
+ drtWithExtensionsConfigGroup.addParameterSet(drtCompanionParams);
+ }
+
@Nullable
@Override
protected Config prepareConfig(Config config) {
SnzActivities.addScoringParams(config);
- for (long ii = 600; ii <= 97200; ii += 600) {
- config.planCalcScore().addActivityParams(new PlanCalcScoreConfigGroup.ActivityParams("accomp_other_" + ii).setTypicalDuration(ii));
- config.planCalcScore().addActivityParams(new PlanCalcScoreConfigGroup.ActivityParams("accomp_children_" + ii).setTypicalDuration(ii));
- }
-
config.controler().setOutputDirectory(sample.adjustName(config.controler().getOutputDirectory()));
config.plans().setInputFile(sample.adjustName(config.plans().getInputFile()));
config.controler().setRunId(sample.adjustName(config.controler().getRunId()));
@@ -160,6 +158,14 @@ protected Config prepareConfig(Config config) {
config.global().setRandomSeed(randomSeed);
+ SimWrapperConfigGroup sw = ConfigUtils.addOrGetModule(config, SimWrapperConfigGroup.class);
+
+ // Relative to config
+ sw.defaultParams().shp = "../shp/dilutionArea.shp";
+ sw.defaultParams().mapCenter = "11.89,48.91";
+ sw.defaultParams().mapZoomLevel = 11d;
+ sw.defaultParams().sampleSize = sample.getSample();
+
if (intermodal) {
ConfigUtils.addOrGetModule(config, PtIntermodalRoutingModesConfigGroup.class);
}
@@ -168,19 +174,15 @@ protected Config prepareConfig(Config config) {
config.addModule(new MultiModeDrtConfigGroup(DrtWithExtensionsConfigGroup::new));
MultiModeDrtConfigGroup multiModeDrtConfig = ConfigUtils.addOrGetModule(config, MultiModeDrtConfigGroup.class);
- DrtWithExtensionsConfigGroup drtWithExtensionsConfigGroup = (DrtWithExtensionsConfigGroup) multiModeDrtConfig.getModalElements().iterator().next();
- DrtCompanionParams drtCompanionParams = new DrtCompanionParams();
- drtCompanionParams.setDrtCompanionSamplingWeights(List.of(
- WEIGHT_1_PASSENGER,
- WEIGHT_2_PASSENGER,
- WEIGHT_3_PASSENGER,
- WEIGHT_4_PASSENGER,
- WEIGHT_5_PASSENGER,
- WEIGHT_6_PASSENGER,
- WEIGHT_7_PASSENGER,
- WEIGHT_8_PASSENGER
- ));
- drtWithExtensionsConfigGroup.addParameterSet(drtCompanionParams);
+
+ for (DrtConfigGroup drtConfigGroup : multiModeDrtConfig.getModalElements()) {
+ //only the KEXI (conventionally driven drt) should get companions
+ if (drtConfigGroup.getMode().equals(TransportMode.drt)) {
+ DrtWithExtensionsConfigGroup drtWithExtensionsConfigGroup = (DrtWithExtensionsConfigGroup) drtConfigGroup;
+ addDrtCompanionParameters(drtWithExtensionsConfigGroup);
+ }
+ }
+
ConfigUtils.addOrGetModule(config, DvrpConfigGroup.class);
DrtConfigs.adjustMultiModeDrtConfig(multiModeDrtConfig, config.planCalcScore(), config.plansCalcRoute());
}
@@ -212,15 +214,12 @@ protected Config prepareConfig(Config config) {
// y = ax + b --> b value, for long trips
distanceBasedPtFareParams.setLongDistanceTripIntercept(30);
- if (strategy.getModeChoice() != StrategyOptions.ModeChoice.randomSubtourMode)
- strategy.applyConfig(config, this::addRunOption);
-
if (iterations != -1)
addRunOption(config, "iter", iterations);
if (!planOrigin.isBlank()) {
config.plans().setInputFile(
- config.plans().getInputFile().replace(".plans", ".plans-" + planOrigin)
+ config.plans().getInputFile().replace(".plans", ".plans-" + planOrigin)
);
addRunOption(config, planOrigin);
@@ -246,9 +245,9 @@ protected void prepareScenario(Scenario scenario) {
if (drt) {
scenario.getPopulation()
- .getFactory()
- .getRouteFactories()
- .setRouteFactory(DrtRoute.class, new DrtRouteFactory());
+ .getFactory()
+ .getRouteFactories()
+ .setRouteFactory(DrtRoute.class, new DrtRouteFactory());
}
if (bikeRnd) {
@@ -274,34 +273,32 @@ public void install() {
install(new KelheimPtFareModule());
install(new SwissRailRaptorModule());
install(new PersonMoneyEventsAnalysisModule());
+ install(new SimWrapperModule());
bind(AnalysisMainModeIdentifier.class).to(KelheimMainModeIdentifier.class);
addControlerListenerBinding().to(ModeChoiceCoverageControlerListener.class);
+ /*
if (strategy.getModeChoice() == StrategyOptions.ModeChoice.randomSubtourMode) {
// Configure mode-choice strategy
install(strategy.applyModule(binder(), config, builder ->
- builder.withFixedCosts(FixedCostsEstimator.DailyConstant.class, TransportMode.car)
- .withLegEstimator(DefaultLegScoreEstimator.class, ModeOptions.AlwaysAvailable.class, TransportMode.bike, TransportMode.ride, TransportMode.walk)
- .withLegEstimator(DefaultLegScoreEstimator.class, ModeOptions.ConsiderIfCarAvailable.class, TransportMode.car)
+ builder.withFixedCosts(FixedCostsEstimator.DailyConstant.class, TransportMode.car)
+ .withLegEstimator(DefaultLegScoreEstimator.class, ModeOptions.AlwaysAvailable.class, TransportMode.bike, TransportMode.ride, TransportMode.walk)
+ .withLegEstimator(DefaultLegScoreEstimator.class, ModeOptions.ConsiderIfCarAvailable.class, TransportMode.car)
// .withLegEstimator(MultiModalDrtLegEstimator.class, ModeOptions.AlwaysAvailable.class, "drt", "av")
- .withTripEstimator(PtTripWithDistanceBasedFareEstimator.class, ModeOptions.AlwaysAvailable.class, TransportMode.pt)
- .withActivityEstimator(DefaultActivityEstimator.class)
- // These are with activity estimation enabled
- .withPruner("ad999", new DistanceBasedPruner(3.03073657, 0.22950583))
- .withPruner("ad99", new DistanceBasedPruner(2.10630819, 0.0917091))
- .withPruner("ad95", new DistanceBasedPruner(1.72092386, 0.03189323))
- )
+ .withTripEstimator(PtTripWithDistanceBasedFareEstimator.class, ModeOptions.AlwaysAvailable.class, TransportMode.pt)
+ .withActivityEstimator(DefaultActivityEstimator.class)
+ // These are with activity estimation enabled
+ .withPruner("ad999", new DistanceBasedPruner(3.03073657, 0.22950583))
+ .withPruner("ad99", new DistanceBasedPruner(2.10630819, 0.0917091))
+ .withPruner("ad95", new DistanceBasedPruner(1.72092386, 0.03189323))
+ )
);
}
+ */
-
- if (incomeDependent) {
- bind(ScoringParametersForPerson.class).to(IncomeDependentUtilityOfMoneyPersonScoringParameters.class).asEagerSingleton();
- }
- if (incomeDependent) {
- bind(ScoringParametersForPerson.class).to(IncomeDependentUtilityOfMoneyPersonScoringParameters.class).asEagerSingleton();
- }
+ //use income-dependent marginal utility of money
+ bind(ScoringParametersForPerson.class).to(IncomeDependentUtilityOfMoneyPersonScoringParameters.class).asEagerSingleton();
if (bikeRnd) {
addEventHandlerBinding().toInstance(new PersonDepartureEventHandler() {
@@ -331,19 +328,16 @@ public void handleEvent(PersonDepartureEvent event) {
// Add speed limit to av vehicle
double maxSpeed = controler.getScenario()
- .getVehicles()
- .getVehicleTypes()
- .get(Id.create("autonomous_vehicle", VehicleType.class))
- .getMaximumVelocity();
+ .getVehicles()
+ .getVehicleTypes()
+ .get(Id.create("autonomous_vehicle", VehicleType.class))
+ .getMaximumVelocity();
controler.addOverridingModule(
- new DvrpModeLimitedMaxSpeedTravelTimeModule("av", config.qsim().getTimeStepSize(),
- maxSpeed));
+ new DvrpModeLimitedMaxSpeedTravelTimeModule("av", config.qsim().getTimeStepSize(),
+ maxSpeed));
for (DrtConfigGroup drtCfg : multiModeDrtConfig.getModalElements()) {
controler.addOverridingModule(new KelheimDrtFareModule(drtCfg, network, avFare));
- if (drtCfg.getMode().equals("av")) {
- KelheimCaseStudyTool.setConfigFile(config, drtCfg, avServiceArea);
- }
}
//controler.addOverridingModule(new DrtEstimatorModule());
diff --git a/src/main/java/org/matsim/run/prepare/DrtStopsWriter.java b/src/main/java/org/matsim/run/prepare/DrtStopsWriter.java
index 22bd633a..6b59ba95 100644
--- a/src/main/java/org/matsim/run/prepare/DrtStopsWriter.java
+++ b/src/main/java/org/matsim/run/prepare/DrtStopsWriter.java
@@ -1,23 +1,28 @@
package org.matsim.run.prepare;
+import org.apache.commons.csv.CSVFormat;
+import org.apache.commons.csv.CSVParser;
+import org.apache.commons.csv.CSVRecord;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.locationtech.jts.geom.Geometry;
import org.matsim.api.core.v01.Coord;
+import org.matsim.api.core.v01.Id;
import org.matsim.api.core.v01.network.Link;
import org.matsim.api.core.v01.network.Network;
import org.matsim.application.options.ShpOptions;
+import org.matsim.core.network.NetworkUtils;
import org.matsim.core.utils.collections.Tuple;
import org.matsim.core.utils.geometry.CoordUtils;
import org.matsim.core.utils.geometry.geotools.MGC;
+import org.matsim.core.utils.io.IOUtils;
import org.matsim.core.utils.io.MatsimXmlWriter;
import org.matsim.core.utils.io.UncheckedIOException;
+import org.matsim.run.RunKelheimScenario;
import org.opengis.feature.simple.SimpleFeature;
-import java.io.BufferedReader;
import java.io.FileWriter;
import java.io.IOException;
-import java.io.InputStreamReader;
import java.net.URL;
import java.util.ArrayList;
import java.util.HashSet;
@@ -42,16 +47,19 @@ public final class DrtStopsWriter extends MatsimXmlWriter {
this.outputFolder = outputFolder;
//If you just say serviceArea = shp.getGeometry() instead of looping through features
//somehow the first feature only is taken -sm0222
- List<SimpleFeature> features = shp.readFeatures();
- for (SimpleFeature feature : features) {
- if (shp.getShapeFile() != null) {
- if (serviceArea == null) {
- serviceArea = (Geometry) feature.getDefaultGeometry();
- } else {
- serviceArea = serviceArea.union((Geometry) feature.getDefaultGeometry());
+ if (shp.isDefined()) {
+ List<SimpleFeature> features = shp.readFeatures();
+ for (SimpleFeature feature : features) {
+ if (shp.getShapeFile() != null) {
+ if (serviceArea == null) {
+ serviceArea = (Geometry) feature.getDefaultGeometry();
+ } else {
+ serviceArea = serviceArea.union((Geometry) feature.getDefaultGeometry());
+ }
}
}
}
+
}
/**
@@ -63,13 +71,19 @@ public void write() throws UncheckedIOException, IOException {
this.writeDoctype("transitSchedule", "http://www.matsim.org/files/dtd/transitSchedule_v1.dtd");
this.writeStartTag("transitSchedule", null);
this.writeStartTag("transitStops", null);
- this.writeTransitStops(network);
+ this.writeTransitStopsAndVizFiles(network);
this.writeEndTag("transitStops");
this.writeEndTag("transitSchedule");
this.close();
}
- private void writeTransitStops(Network network) throws IOException {
+ /**
+ * In addition to writing the stops xml file, this also writes a csv file that contains the same information, as well as a network file that contains only
+ * the links assigned to stops (for visualisation).
+ * @param network the network to retrieve link ids from
+ * @throws IOException if some file can't be opened or written
+ */
+ private void writeTransitStopsAndVizFiles(Network network) throws IOException {
// Write csv file for adjusted stop location
FileWriter csvWriter = new FileWriter(outputFolder + "/"
+ mode + "-stops-locations.csv");
@@ -86,39 +100,64 @@ private void writeTransitStops(Network network) throws IOException {
log.info("Start processing the network. This may take some time...");
URL data = new URL("https://svn.vsp.tu-berlin.de/" +
"repos/public-svn/matsim/scenarios/countries/de/kelheim/original-data/" +
- "KEXI_Haltestellen_Liste_Kelheim_utm32n.csv");
-
- BufferedReader csvReader = new BufferedReader(new InputStreamReader(data.openStream()));
- csvReader.readLine();
- String stopEntry = csvReader.readLine();
- while (stopEntry != null) {
-
- String[] stopData = stopEntry.split(";");
- // write stop
- Coord coord = new Coord(Double.parseDouble(stopData[2]), Double.parseDouble(stopData[3]));
-
- if (serviceArea == null || MGC.coord2Point(coord).within(serviceArea)) {
- List<Tuple<String, String>> attributes = new ArrayList<Tuple<String, String>>(5);
- attributes.add(createTuple("id", stopData[0]));
- attributes.add(createTuple("x", stopData[2]));
- attributes.add(createTuple("y", stopData[3]));
- Link link = getStopLink(coord, network);
- attributes.add(createTuple("linkRefId", link.getId().toString()));
- this.writeStartTag("stopFacility", attributes, true);
-
- csvWriter.append(stopData[0]);
- csvWriter.append(",");
- csvWriter.append(link.getId().toString());
- csvWriter.append(",");
- csvWriter.append(Double.toString(link.getToNode().getCoord().getX()));
- csvWriter.append(",");
- csvWriter.append(Double.toString(link.getToNode().getCoord().getY()));
- csvWriter.append("\n");
+ "KEXI_Haltestellen_Liste_Kelheim_utm32n_withLinkIds.csv");
+ Set<Id<Link>> allLinks = new HashSet<>();
+
+ try (CSVParser parser = new CSVParser(IOUtils.getBufferedReader(data),
+ CSVFormat.DEFAULT.withDelimiter(';').withFirstRecordAsHeader())) {
+ for (CSVRecord row : parser) {
+ Coord coord = new Coord(Double.parseDouble(row.get("x")), Double.parseDouble(row.get("y")));
+ if (serviceArea == null || MGC.coord2Point(coord).within(serviceArea)) {
+ List<Tuple<String, String>> attributes = new ArrayList<>(5);
+ attributes.add(createTuple("id", row.get("Haltestellen-Nr.")));
+ attributes.add(createTuple("x", row.get("x")));
+ attributes.add(createTuple("y", row.get("y")));
+ Link link = null;
+ // If link is already determined by hand in the raw data, then use that link directly.
+ if (row.get("linkId_v" + RunKelheimScenario.VERSION)!=null){
+ link = network.getLinks().get(Id.createLinkId(row.get("linkId_v" + RunKelheimScenario.VERSION)));
+ } else {
+ link = getStopLink(coord, network);
+ }
+ allLinks.add(link.getId());
+ attributes.add(createTuple("linkRefId", link.getId().toString()));
+
+ //write into stops xml file
+ this.writeStartTag("stopFacility", attributes, true);
+
+ //write into csv file for viz
+ csvWriter.append(row.get("Haltestellen-Nr."));
+ csvWriter.append(",");
+ csvWriter.append(link.getId().toString());
+ csvWriter.append(",");
+ csvWriter.append(Double.toString(link.getToNode().getCoord().getX()));
+ csvWriter.append(",");
+ csvWriter.append(Double.toString(link.getToNode().getCoord().getY()));
+ csvWriter.append("\n");
+ }
}
-
- stopEntry = csvReader.readLine();
}
+
csvWriter.close();
+
+ //write filtered network file (for viz)
+ writeFilteredNetwork(network, allLinks);
+ }
+
+ private void writeFilteredNetwork(Network network, Set<Id<Link>> allLinks) {
+ //remove all links but the ones in the set
+ network.getLinks().keySet()
+ .forEach(linkId -> {
+ if (!allLinks.contains(linkId)) {
+ network.removeLink(linkId);
+ }
+ });
+ //remove 'empty' nodes
+ network.getNodes().values().stream()
+ .filter(node -> node.getInLinks().size() == 0 && node.getOutLinks().size() == 0)
+ .forEach(node -> network.removeNode(node.getId()));
+
+ NetworkUtils.writeNetwork(network, outputFolder + "/" + mode + "-stops-links.xml.gz");
}
private Link getStopLink(Coord coord, Network network) {
diff --git a/src/main/java/org/matsim/run/prepare/PrepareNetwork.java b/src/main/java/org/matsim/run/prepare/PrepareNetwork.java
index 1ca9dbe8..3bafb9f9 100644
--- a/src/main/java/org/matsim/run/prepare/PrepareNetwork.java
+++ b/src/main/java/org/matsim/run/prepare/PrepareNetwork.java
@@ -159,14 +159,18 @@ private void prepareNetworkDrt(Network network) {
}
}
+ Set<String> allowedModes = new HashSet<>(link.getAllowedModes());
if (isAvAllowed) {
- Set<String> allowedModes = new HashSet<>(link.getAllowedModes());
-
if (!allowedModes.contains("av")) {
allowedModes.add("av");
link.setAllowedModes(allowedModes);
linkCount[1] = linkCount[1] + 1;
}
+ } else {
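+ // link is not flagged for av service: remove the mode again if it was previously allowed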
+ if (allowedModes.contains("av")) {
+ allowedModes.remove("av");
+ link.setAllowedModes(allowedModes);
+ }
}
}
diff --git a/src/main/java/org/matsim/run/prepare/PrepareRealDrtDemand.java b/src/main/java/org/matsim/run/prepare/PrepareRealDrtDemand.java
index ee836ab6..010606dc 100644
--- a/src/main/java/org/matsim/run/prepare/PrepareRealDrtDemand.java
+++ b/src/main/java/org/matsim/run/prepare/PrepareRealDrtDemand.java
@@ -8,6 +8,7 @@
import org.matsim.api.core.v01.Coord;
import org.matsim.api.core.v01.Id;
import org.matsim.api.core.v01.Scenario;
+import org.matsim.api.core.v01.TransportMode;
import org.matsim.api.core.v01.population.*;
import org.matsim.application.MATSimAppCommand;
import org.matsim.application.options.CrsOptions;
@@ -23,14 +24,14 @@
import java.util.Map;
@CommandLine.Command(
- name = "generate-real-drt-demand",
- description = "Prepare drt only population based on real data"
+ name = "generate-real-drt-demand",
+ description = "Prepare drt only population based on real data"
)
public class PrepareRealDrtDemand implements MATSimAppCommand {
private static final Logger log = LogManager.getLogger(PrepareRealDrtDemand.class);
- @CommandLine.Option(names = "--drt-stops", description = "path to drt stop xml file", required = true)
+ @CommandLine.Option(names = "--drt-stops", description = "path to drt stop xml file", defaultValue = "")
private String drtStops;
@CommandLine.Option(names = "--demands", description = "path to real drt demand csv file", required = true)
@@ -57,31 +58,34 @@ public Integer call() throws Exception {
// Map<String, Coord> stationCoordMap = loadStationCoordinates();
try (CSVParser parser = new CSVParser(Files.newBufferedReader(Path.of(demands)),
- CSVFormat.DEFAULT.withDelimiter(',').withFirstRecordAsHeader())) {
+ CSVFormat.DEFAULT.withDelimiter(',').withFirstRecordAsHeader())) {
int counter = 0;
for (CSVRecord row : parser) {
- double fromX = Double.parseDouble(row.get(4));
- double fromY = Double.parseDouble(row.get(5));
- double toX = Double.parseDouble(row.get(7));
- double toY = Double.parseDouble(row.get(8));
+ double fromX = Double.parseDouble(row.get("from_x"));
+ double fromY = Double.parseDouble(row.get("from_y"));
+ double toX = Double.parseDouble(row.get("to_x"));
+ double toY = Double.parseDouble(row.get("to_y"));
Coord fromCoord = new Coord(fromX, fromY);
Coord transformedFromCoord = crs.getTransformation().transform(fromCoord);
Coord toCoord = new Coord(toX, toY);
Coord transformedToCoord = crs.getTransformation().transform(toCoord);
- double departureTime = Double.parseDouble(row.get(2));
-
- Person person = populationFactory.createPerson(Id.createPersonId("drt_" + counter));
- Plan plan = populationFactory.createPlan();
- Activity activity0 = populationFactory.createActivityFromCoord("home", transformedFromCoord);
- activity0.setEndTime(departureTime);
- Leg leg = populationFactory.createLeg("drt");
- Activity activity1 = populationFactory.createActivityFromCoord("work", transformedToCoord);
- plan.addActivity(activity0);
- plan.addLeg(leg);
- plan.addActivity(activity1);
- person.addPlan(plan);
- population.addPerson(person);
- counter += 1;
+ double departureTime = Double.parseDouble(row.get("time_in_seconds"));
+ int numberOfPassengers = Integer.parseInt(row.get("number_of_passengers"));
+
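+ // create one drt-only agent per recorded passenger, departing at the recorded time between two dummy activities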
+ for (int i = 0; i < numberOfPassengers; i++) {
+ Person person = populationFactory.createPerson(Id.createPersonId("drt_person_" + counter));
+ Plan plan = populationFactory.createPlan();
+ Activity activity0 = populationFactory.createActivityFromCoord("dummy", transformedFromCoord);
+ activity0.setEndTime(departureTime);
+ Leg leg = populationFactory.createLeg(TransportMode.drt);
+ Activity activity1 = populationFactory.createActivityFromCoord("dummy", transformedToCoord);
+ plan.addActivity(activity0);
+ plan.addLeg(leg);
+ plan.addActivity(activity1);
+ person.addPlan(plan);
+ population.addPerson(person);
+ counter += 1;
+ }
}
log.info("There are in total {} DRT requests on that day", counter);
}
@@ -96,7 +100,7 @@ public Integer call() throws Exception {
private Map<String, Coord> loadStationCoordinates() throws IOException {
Map<String, Coord> stationCoordMap = new HashMap<>();
try (CSVParser parser = new CSVParser(Files.newBufferedReader(Path.of(drtStops)),
- CSVFormat.DEFAULT.withDelimiter(',').withFirstRecordAsHeader())) {
+ CSVFormat.DEFAULT.withDelimiter(',').withFirstRecordAsHeader())) {
for (CSVRecord row : parser) {
String stationName = row.get(0);
double x = Double.parseDouble(row.get(2));
diff --git a/src/main/java/org/matsim/run/utils/KelheimCaseStudyTool.java b/src/main/java/org/matsim/run/utils/KelheimCaseStudyTool.java
deleted file mode 100644
index 928e9e33..00000000
--- a/src/main/java/org/matsim/run/utils/KelheimCaseStudyTool.java
+++ /dev/null
@@ -1,44 +0,0 @@
-package org.matsim.run.utils;
-
-import org.matsim.contrib.drt.run.DrtConfigGroup;
-import org.matsim.core.config.Config;
-
-/**
- * Helper class to setup config file for case studies.
- */
-public final class KelheimCaseStudyTool {
-
- private KelheimCaseStudyTool() {
- }
-
- /**
- * Different possible services areas for the AV.
- */
- public enum AvServiceArea {NULL, CORE, CORE_WITH_SHOP, HOHENPFAHL, BAUERNSIEDLUNG}
- // NULL: do not change anything; CORE: Donaupark + Altstadt; HOHENPFAHL: CORE + Hohenpfahl area; BAUERNSIEDLUNG: CORE + Bauernsiedlung area
-
- public static void setConfigFile(Config config, DrtConfigGroup drtConfig, AvServiceArea avServiceAreas) {
- // Set drt related things (vehicle file, stops file)
- if (avServiceAreas == AvServiceArea.CORE) {
- drtConfig.transitStopFile = "av-stops-DP-AS.xml";
- }
-
- if (avServiceAreas == AvServiceArea.CORE_WITH_SHOP) {
- drtConfig.transitStopFile = "av-stops-DP-AS-shops.xml";
- }
-
- if (avServiceAreas == AvServiceArea.HOHENPFAHL) {
- drtConfig.transitStopFile = "av-stops-Hohenpfahl-DP-AS.xml";
- }
-
- if (avServiceAreas == AvServiceArea.BAUERNSIEDLUNG) {
- drtConfig.transitStopFile = "av-stops-Bauernsiedlung-DP-AS.xml";
- }
-
- // Update output directory
- if (avServiceAreas != AvServiceArea.NULL) {
- String outputPath = config.controler().getOutputDirectory() + "-" + avServiceAreas.toString();
- config.controler().setOutputDirectory(outputPath);
- }
- }
-}
diff --git a/src/main/python/adj.py b/src/main/python/adj.py
index 3fcca58e..3ce2a308 100644
--- a/src/main/python/adj.py
+++ b/src/main/python/adj.py
@@ -15,11 +15,14 @@
sagg = sim.groupby("dist_group").sum()
sagg['share'] = sagg.trips / np.sum(sagg.trips)
- print("Start")
+ print("##### Start")
print(sagg)
- print("Mid")
- print(mid)
+ print("##### Mid")
+# print(mid)
+ print(mid.groupby("dist_group").sum())
+ print(mid.groupby("mode").sum())
+ print("#############")
# Rescale the distance groups of the survey data so that it matches the distance group distribution of the simulation
# The overall mode share after this adjustment will be the resulting adjusted mode share
@@ -44,7 +47,7 @@ def f(x, p=False):
return err
# One variable for each distance group
- x0 = np.ones(6) / 6
+ x0 = np.ones(5) / 5
# Sum of weights need to be smaller than one
cons = [{'type': 'ineq', 'fun': lambda x: 1 - sum(x)}]
diff --git a/src/main/python/calibrate.py b/src/main/python/calibrate.py
index 4efc02f5..871be716 100755
--- a/src/main/python/calibrate.py
+++ b/src/main/python/calibrate.py
@@ -29,15 +29,6 @@
"ride": 0.17
}
-# Use adjusted modal split for our distance distribution
-target = {
- "walk": 0.111505,
- "bike": 0.068790,
- "pt": 0.038063,
- "car": 0.612060,
- "ride": 0.169581
-}
-
city = gpd.read_file("../input/shp/dilutionArea.shp").set_crs("EPSG:25832")
homes = pd.read_csv("../input/v3.0/kelheim-v3.0-homes.csv", dtype={"person": "str"})
@@ -55,12 +46,12 @@ def filter_modes(df):
return df[df.main_mode.isin(modes)]
study, obj = calibration.create_mode_share_study("calib", "matsim-kelheim-3.x-SNAPSHOT-00602ea.jar",
- "../input/v3.0/kelheim-v3.0-25pct.config.xml",
+ "../input/v3.0/kelheim-v3.0-config.xml",
modes, target,
initial_asc=initial,
args="--25pct --config:TimeAllocationMutator.mutationRange=900",
jvm_args="-Xmx46G -Xms46G -XX:+AlwaysPreTouch -XX:+UseParallelGC",
- lr=calibration.auto_lr_scheduler(),
+ lr=calibration.linear_lr_scheduler(interval=6),
person_filter=f, map_trips=filter_modes, chain_runs=calibration.default_chain_scheduler)
diff --git a/src/main/resources/META-INF/services/org.matsim.simwrapper.DashboardProvider b/src/main/resources/META-INF/services/org.matsim.simwrapper.DashboardProvider
new file mode 100644
index 00000000..f64bb71e
--- /dev/null
+++ b/src/main/resources/META-INF/services/org.matsim.simwrapper.DashboardProvider
@@ -0,0 +1 @@
+org.matsim.dashboards.KelheimDashboardProvider
\ No newline at end of file
diff --git a/src/main/resources/kelheim-v3.0-routes-ref.csv.gz b/src/main/resources/kelheim-v3.0-routes-ref.csv.gz
new file mode 100644
index 00000000..7f0eaaee
Binary files /dev/null and b/src/main/resources/kelheim-v3.0-routes-ref.csv.gz differ
diff --git a/src/main/resources/kelheim_mode_share.csv b/src/main/resources/kelheim_mode_share.csv
new file mode 100644
index 00000000..ccb40e8e
--- /dev/null
+++ b/src/main/resources/kelheim_mode_share.csv
@@ -0,0 +1,31 @@
+main_mode,dist_group,share
+walk,0 - 1000,0.077997404
+walk,1000 - 2000,0.022437119
+walk,2000 - 5000,0.021648596
+walk,5000 - 10000,0.006526142
+walk,10000 - 20000,0.001390738
+walk,20000+,0
+bike,0 - 1000,0.030063872
+bike,1000 - 2000,0.018591584
+bike,2000 - 5000,0.019530356
+bike,5000 - 10000,0.005740392
+bike,10000 - 20000,0.004077643
+bike,20000+,0.001996153
+car,0 - 1000,0.053455367
+car,1000 - 2000,0.057565713
+car,2000 - 5000,0.112306003
+car,5000 - 10000,0.111864345
+car,10000 - 20000,0.125054917
+car,20000+,0.129753655
+ride,0 - 1000,0.016991043
+ride,1000 - 2000,0.015283749
+ride,2000 - 5000,0.03320568
+ride,5000 - 10000,0.029744347
+ride,10000 - 20000,0.035654628
+ride,20000+,0.039120553
+pt,0 - 1000,0
+pt,1000 - 2000,0.000795998
+pt,2000 - 5000,0.005435246
+pt,5000 - 10000,0.007455167
+pt,10000 - 20000,0.006808784
+pt,20000+,0.009504804
diff --git a/src/main/resources/kelheim_mode_share_per_dist.csv b/src/main/resources/kelheim_mode_share_per_dist.csv
new file mode 100644
index 00000000..50fcb530
--- /dev/null
+++ b/src/main/resources/kelheim_mode_share_per_dist.csv
@@ -0,0 +1,31 @@
+main_mode,dist_group,share
+walk,0 - 1000,0.525302484
+walk,1000 - 2000,0.255555556
+walk,2000 - 5000,0.149425287
+walk,5000 - 10000,0.054347826
+walk,10000 - 20000,0.010989011
+walk,20000+,0
+bike,0 - 1000,0.138114867
+bike,1000 - 2000,0.144444444
+bike,2000 - 5000,0.091954023
+bike,5000 - 10000,0.032608696
+bike,10000 - 20000,0.021978022
+bike,20000+,0.010251064
+car,0 - 1000,0.256239029
+car,1000 - 2000,0.466666667
+car,2000 - 5000,0.551724138
+car,5000 - 10000,0.663043478
+car,10000 - 20000,0.703296703
+car,20000+,0.695269506
+ride,0 - 1000,0.08034362
+ride,1000 - 2000,0.122222222
+ride,2000 - 5000,0.16091954
+ride,5000 - 10000,0.173913043
+ride,10000 - 20000,0.197802198
+ride,20000+,0.206783538
+pt,0 - 1000,0
+pt,1000 - 2000,0.011111111
+pt,2000 - 5000,0.045977011
+pt,5000 - 10000,0.076086957
+pt,10000 - 20000,0.065934066
+pt,20000+,0.087695892
\ No newline at end of file
diff --git a/src/test/java/org/matsim/run/RunKelheimIntegrationTest.java b/src/test/java/org/matsim/run/RunKelheimIntegrationTest.java
index db052c76..872f32e4 100644
--- a/src/test/java/org/matsim/run/RunKelheimIntegrationTest.java
+++ b/src/test/java/org/matsim/run/RunKelheimIntegrationTest.java
@@ -6,6 +6,7 @@
import org.matsim.core.config.Config;
import org.matsim.core.config.ConfigUtils;
import org.matsim.core.controler.OutputDirectoryHierarchy;
+import org.matsim.simwrapper.SimWrapperConfigGroup;
import org.matsim.testcases.MatsimTestUtils;
public class RunKelheimIntegrationTest {
@@ -18,22 +19,24 @@ public final void runExamplePopulationTest() {
config.controler().setLastIteration(1);
config.global().setNumberOfThreads(1);
config.qsim().setNumberOfThreads(1);
+ config.controler().setOverwriteFileSetting(OutputDirectoryHierarchy.OverwriteFileSetting.deleteDirectoryIfExists);
- config.controler()
- .setOverwriteFileSetting(OutputDirectoryHierarchy.OverwriteFileSetting.deleteDirectoryIfExists);
+ ConfigUtils.addOrGetModule(config, SimWrapperConfigGroup.class).defaultDashboards = SimWrapperConfigGroup.Mode.disabled;
MATSimApplication.execute(RunKelheimScenario.class, config,
- "run", "--1pct");
+ "run", "--1pct");
}
@Test
public final void runDrtExamplePopulationTest() {
Config config = ConfigUtils.loadConfig("input/test.with-drt.config.xml");
config.controler().setLastIteration(1);
- config.controler()
- .setOverwriteFileSetting(OutputDirectoryHierarchy.OverwriteFileSetting.deleteDirectoryIfExists);
+ config.controler().setOverwriteFileSetting(OutputDirectoryHierarchy.OverwriteFileSetting.deleteDirectoryIfExists);
+
+ ConfigUtils.addOrGetModule(config, SimWrapperConfigGroup.class).defaultDashboards = SimWrapperConfigGroup.Mode.disabled;
+
MATSimApplication.execute(RunKelheimScenario.class, config,
- "run", "--1pct", "--with-drt");
+ "run", "--1pct", "--with-drt");
}
}