From 3d48fa6c7ff6d1bb59ebe910ab321b9f2d79f9f4 Mon Sep 17 00:00:00 2001 From: Chengqi Lu <43133404+luchengqi7@users.noreply.github.com> Date: Mon, 3 Jun 2024 11:28:28 +0200 Subject: [PATCH 1/2] Create PlotDrtStoppingTasks.java (#77) * Create PlotDrtStoppingTasks.java * checkstyle --------- Co-authored-by: schlenther --- .../drt/PlotDrtStoppingTasks.java | 20 +++++++++++++++++++ 1 file changed, 20 insertions(+) create mode 100644 src/main/java/org/matsim/analysis/postAnalysis/drt/PlotDrtStoppingTasks.java diff --git a/src/main/java/org/matsim/analysis/postAnalysis/drt/PlotDrtStoppingTasks.java b/src/main/java/org/matsim/analysis/postAnalysis/drt/PlotDrtStoppingTasks.java new file mode 100644 index 0000000..ffdaaf5 --- /dev/null +++ b/src/main/java/org/matsim/analysis/postAnalysis/drt/PlotDrtStoppingTasks.java @@ -0,0 +1,20 @@ +package org.matsim.analysis.postAnalysis.drt; + +import org.matsim.contrib.drt.analysis.afterSimAnalysis.DrtVehicleStoppingTaskWriter; + +import java.io.IOException; +import java.nio.file.Path; + +/** + * Plot DRT stopping task (idle and pickup/drop-off stops) on the map, with start time and end time. + * Please specify the output directory of the DRT in the input argument. 
+ */ +public final class PlotDrtStoppingTasks { + + private PlotDrtStoppingTasks() {} + + public static void main(String[] args) throws IOException { + new DrtVehicleStoppingTaskWriter(Path.of(args[0])).run(); + } + +} From 82cad544356ea8939bb45b831c3b1c8dbe283a1b Mon Sep 17 00:00:00 2001 From: simei94 <67737999+simei94@users.noreply.github.com> Date: Tue, 4 Jun 2024 15:07:28 +0200 Subject: [PATCH 2/2] Average dashboard drt (#78) * average drt dashboard wip * bump to matsim-2025.0.PR3254 * use GridMap instead of XY * bump to java 21 * move to dashboard package + add some dashboards * move to dashboard package + add some dashboards * update config (ReplanningAnnealerConfigGroup) * enable plan inheritance analysis via config * average dashboard + analysis WIP * re-structure tables * customize dashboard + add todo * add average emissions dashboard infrastructure WIP * bump to matsim 2025 PR 3271 * finalize average emissions dashboard * average noise dashboard * use new append option rather than renaming existing dashboards. 
needs to be tested though * checkstyle * update drt config to fix tests * bump up maven compiler + jacoco * actions v4 * handle drt groups in KelheimDrtFareHandler (by not cleaning map :/ ) --------- Co-authored-by: tschlenther --- .github/workflows/build.yaml | 12 +- input/test.config.xml | 2 +- input/test.with-drt.config.xml | 38 ++- input/v3.1/kelheim-v3.1-25pct.kexi.config.xml | 2 +- input/v3.1/kelheim-v3.1-config.xml | 2 +- pom.xml | 44 +++- .../analysis/CreateEmissionDashboard.java | 4 +- .../PotentialServiceAreaAnalysis.java | 6 +- .../emissions/KelheimEmissionsDashboard.java | 6 +- ...missionsPostProcessingAverageAnalysis.java | 233 +++++++++++++++++ .../GenerateKelheimAnalysisNetwork.java | 4 +- .../postAnalysis/NoiseAverageAnalysis.java | 246 ++++++++++++++++++ .../TransformCoordinatesTripCSV.java | 10 +- .../drt/CalcEuclideanDistances.java | 6 +- .../drt/DrtPostProcessingAverageAnalysis.java | 201 ++++++++++++++ .../drt/DrtServiceQualityAnalysis.java | 12 +- .../scoring/OutputPlansScoringAnalysis.java | 5 +- .../preAnalysis/ModeShareAnalysis.java | 5 +- .../matsim/dashboard/AverageDrtDashboard.java | 202 ++++++++++++++ .../AverageKelheimEmissionsDashboard.java | 118 +++++++++ .../AverageKelheimNoiseDashboard.java | 95 +++++++ .../dashboard/CreateAverageDashboards.java | 104 ++++++++ .../KelheimDashboardProvider.java | 6 +- .../matsim/drtFare/KelheimDrtFareHandler.java | 16 +- .../org/matsim/run/RunKelheimScenario.java | 5 +- .../matsim/run/prepare/DrtStopsWriter.java | 5 +- .../run/prepare/PrepareRealDrtDemand.java | 8 +- .../org.matsim.simwrapper.DashboardProvider | 2 +- .../matsim/run/RunKelheimIntegrationTest.java | 8 +- 29 files changed, 1321 insertions(+), 86 deletions(-) create mode 100644 src/main/java/org/matsim/analysis/postAnalysis/EmissionsPostProcessingAverageAnalysis.java create mode 100644 src/main/java/org/matsim/analysis/postAnalysis/NoiseAverageAnalysis.java create mode 100644 
src/main/java/org/matsim/analysis/postAnalysis/drt/DrtPostProcessingAverageAnalysis.java create mode 100644 src/main/java/org/matsim/dashboard/AverageDrtDashboard.java create mode 100644 src/main/java/org/matsim/dashboard/AverageKelheimEmissionsDashboard.java create mode 100644 src/main/java/org/matsim/dashboard/AverageKelheimNoiseDashboard.java create mode 100644 src/main/java/org/matsim/dashboard/CreateAverageDashboards.java rename src/main/java/org/matsim/{analysis => dashboard}/KelheimDashboardProvider.java (87%) diff --git a/.github/workflows/build.yaml b/.github/workflows/build.yaml index 1150725..9e99528 100644 --- a/.github/workflows/build.yaml +++ b/.github/workflows/build.yaml @@ -11,10 +11,10 @@ jobs: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 - - uses: actions/setup-java@v3 + - uses: actions/checkout@v4 + - uses: actions/setup-java@v4 with: - java-version: 17 + java-version: 21 architecture: x64 distribution: adopt cache: maven @@ -32,7 +32,7 @@ jobs: - uses: actions/checkout@v3 - uses: actions/setup-java@v3 with: - java-version: 17 + java-version: 21 architecture: x64 distribution: adopt cache: maven @@ -51,7 +51,7 @@ jobs: strategy: fail-fast: false matrix: - java: [17] + java: [21] steps: - uses: actions/checkout@v3 @@ -80,7 +80,7 @@ jobs: - uses: actions/checkout@v3 - uses: actions/setup-java@v3 with: - java-version: 17 + java-version: 21 architecture: x64 distribution: adopt cache: maven diff --git a/input/test.config.xml b/input/test.config.xml index 4d4855c..6be56a0 100644 --- a/input/test.config.xml +++ b/input/test.config.xml @@ -78,7 +78,7 @@ - + diff --git a/input/test.with-drt.config.xml b/input/test.with-drt.config.xml index 20e4775..feaca1e 100644 --- a/input/test.with-drt.config.xml +++ b/input/test.with-drt.config.xml @@ -77,7 +77,7 @@ - + @@ -292,15 +292,10 @@ - - - - - - - - - + + + + @@ -329,16 +324,10 @@ - - - - - - - - - - + + + + @@ -349,9 +338,12 @@ - - - + + + + + + diff --git 
a/input/v3.1/kelheim-v3.1-25pct.kexi.config.xml b/input/v3.1/kelheim-v3.1-25pct.kexi.config.xml index c2ea1d7..facb12e 100644 --- a/input/v3.1/kelheim-v3.1-25pct.kexi.config.xml +++ b/input/v3.1/kelheim-v3.1-25pct.kexi.config.xml @@ -77,7 +77,7 @@ - + diff --git a/input/v3.1/kelheim-v3.1-config.xml b/input/v3.1/kelheim-v3.1-config.xml index 14b5033..a082d34 100644 --- a/input/v3.1/kelheim-v3.1-config.xml +++ b/input/v3.1/kelheim-v3.1-config.xml @@ -78,7 +78,7 @@ - + diff --git a/pom.xml b/pom.xml index 94f2729..8dd724e 100644 --- a/pom.xml +++ b/pom.xml @@ -6,7 +6,7 @@ matsim-all - 16.0-PR2878 + 2025.0-PR3271 @@ -27,7 +27,7 @@ UTF-8 UTF-8 - 17 + 21 @@ -88,11 +88,30 @@ + + + + + + + + + + + org.junit.jupiter + junit-jupiter-engine + test + + + org.junit.jupiter + junit-jupiter + test + + - - junit - junit - 4.13.2 + org.assertj + assertj-core + 3.24.2 test @@ -187,9 +206,16 @@ commons-io commons-io - 2.13.0 + 2.16.1 + + + + org.apache.avro + avro + 1.11.3 + @@ -208,7 +234,7 @@ 1 false - @{argLine} -Xmx6500m -Djava.awt.headless=true -Dmatsim.preferLocalDtds=true + @{argLine} -Djava.awt.headless=true -Dmatsim.preferLocalDtds=true @@ -217,7 +243,7 @@ org.jacoco jacoco-maven-plugin - 0.8.8 + 0.8.12 diff --git a/src/main/java/org/matsim/analysis/CreateEmissionDashboard.java b/src/main/java/org/matsim/analysis/CreateEmissionDashboard.java index f3aee04..6ea2e79 100644 --- a/src/main/java/org/matsim/analysis/CreateEmissionDashboard.java +++ b/src/main/java/org/matsim/analysis/CreateEmissionDashboard.java @@ -66,7 +66,7 @@ public Integer call() throws Exception { log.info("Running on {}", runDirectory); //this is to avoid overriding - renameExistingDashboardYAMLs(runDirectory); +// renameExistingDashboardYAMLs(runDirectory); Path configPath = ApplicationUtils.matchInput("config.xml", runDirectory); Config config = ConfigUtils.loadConfig(configPath.toString()); @@ -88,7 +88,7 @@ public Integer call() throws Exception { } try { - sw.generate(runDirectory); + 
sw.generate(runDirectory, true); sw.run(runDirectory); } catch (IOException e) { throw new RuntimeException(e); diff --git a/src/main/java/org/matsim/analysis/PotentialServiceAreaAnalysis.java b/src/main/java/org/matsim/analysis/PotentialServiceAreaAnalysis.java index 0e33859..3f5c340 100644 --- a/src/main/java/org/matsim/analysis/PotentialServiceAreaAnalysis.java +++ b/src/main/java/org/matsim/analysis/PotentialServiceAreaAnalysis.java @@ -50,7 +50,7 @@ import org.matsim.core.utils.geometry.CoordinateTransformation; import org.matsim.core.utils.geometry.geotools.MGC; import org.matsim.core.utils.geometry.transformations.TransformationFactory; -import org.matsim.core.utils.gis.ShapeFileReader; +import org.matsim.core.utils.gis.GeoFileReader; import org.matsim.core.utils.io.IOUtils; import org.matsim.freight.carriers.*; import org.matsim.vehicles.Vehicle; @@ -103,7 +103,7 @@ public static void main(String[] args) { //read in service area map PreparedGeometryFactory factory = new PreparedGeometryFactory(); - Map serviceAreas = StreamEx.of(ShapeFileReader.getAllFeatures(IOUtils.getFileUrl(INPUT_SERVICE_AREAS_SHAPE))) + Map serviceAreas = StreamEx.of(GeoFileReader.getAllFeatures(IOUtils.getFileUrl(INPUT_SERVICE_AREAS_SHAPE))) .mapToEntry(sf -> (String) sf.getAttribute("name"), sf -> factory.create((Geometry) sf.getDefaultGeometry())) .collect(toMap(Map.Entry::getKey, Map.Entry::getValue)); @@ -300,8 +300,6 @@ private static Carrier buildCarrier(String areaName, Collection stops, Net CarrierVehicle.Builder vBuilder = CarrierVehicle.Builder.newInstance(Id.create((areaName + "_shuttle"), Vehicle.class), depotLink, vehicleType); vBuilder.setEarliestStart(0 * 60 * 60); vBuilder.setLatestEnd(24 * 60 * 60); - vBuilder.setType(vehicleType); - vBuilder.setTypeId(vehicleType.getId()); CarrierVehicle vehicle = vBuilder.build(); carrier.getCarrierCapabilities().getCarrierVehicles().put(vehicle.getId(), vehicle); diff --git 
a/src/main/java/org/matsim/analysis/emissions/KelheimEmissionsDashboard.java b/src/main/java/org/matsim/analysis/emissions/KelheimEmissionsDashboard.java index fbd239d..c8d92e7 100644 --- a/src/main/java/org/matsim/analysis/emissions/KelheimEmissionsDashboard.java +++ b/src/main/java/org/matsim/analysis/emissions/KelheimEmissionsDashboard.java @@ -24,9 +24,9 @@ import org.matsim.simwrapper.Dashboard; import org.matsim.simwrapper.Header; import org.matsim.simwrapper.Layout; +import org.matsim.simwrapper.viz.GridMap; import org.matsim.simwrapper.viz.Links; import org.matsim.simwrapper.viz.Table; -import org.matsim.simwrapper.viz.XYTime; /** * this is basically equivalent to the standard emissions dashboard @@ -88,14 +88,14 @@ public void configure(Header header, Layout layout) { viz.center = data.context().getCenter(); viz.width = 3.0; }); - layout.row("second").el(XYTime.class, (viz, data) -> { + layout.row("second").el(GridMap.class, (viz, data) -> { viz.title = "CO₂ Emissions"; viz.description = "per day. Be aware that CO2 values are provided in the simulation sample size!"; viz.height = 12.0; viz.file = data.compute(KelheimOfflineAirPollutionAnalysisByEngineInformation.class, "emissions_grid_per_day.xyt.csv", new String[0]); }); layout.row("third") - .el(XYTime.class, (viz, data) -> { + .el(GridMap.class, (viz, data) -> { viz.title = "CO₂ Emissions"; viz.description = "per hour. 
Be aware that CO2 values are provided in the simulation sample size!"; viz.height = 12.; diff --git a/src/main/java/org/matsim/analysis/postAnalysis/EmissionsPostProcessingAverageAnalysis.java b/src/main/java/org/matsim/analysis/postAnalysis/EmissionsPostProcessingAverageAnalysis.java new file mode 100644 index 0000000..ce66b34 --- /dev/null +++ b/src/main/java/org/matsim/analysis/postAnalysis/EmissionsPostProcessingAverageAnalysis.java @@ -0,0 +1,233 @@ +package org.matsim.analysis.postAnalysis; + +import org.apache.commons.csv.CSVFormat; +import org.apache.commons.csv.CSVPrinter; +import org.matsim.api.core.v01.Coord; +import org.matsim.application.CommandSpec; +import org.matsim.application.MATSimAppCommand; +import org.matsim.application.options.CsvOptions; +import org.matsim.application.options.InputOptions; +import org.matsim.application.options.OutputOptions; +import org.matsim.core.utils.io.IOUtils; +import picocli.CommandLine; +import tech.tablesaw.api.ColumnType; +import tech.tablesaw.api.Row; +import tech.tablesaw.api.Table; +import tech.tablesaw.io.csv.CsvReadOptions; + +import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.*; +import java.util.concurrent.atomic.AtomicReference; +import java.util.function.Function; + +import static org.matsim.application.ApplicationUtils.globFile; + +@CommandLine.Command(name = "average-emissions", description = "Calculates average emission stats based on several sim runs with different random seeds.") +@CommandSpec( + requires = {"runs"}, + produces = {"mean_emissions_total.csv", "mean_emissions_per_link_per_m.csv", "mean_emissions_grid_per_day.xyt.csv", "mean_emissions_grid_per_hour.csv"} +) +public class EmissionsPostProcessingAverageAnalysis implements MATSimAppCommand { + + @CommandLine.Mixin + private InputOptions input = InputOptions.ofCommand(EmissionsPostProcessingAverageAnalysis.class); + @CommandLine.Mixin + private OutputOptions output = 
OutputOptions.ofCommand(EmissionsPostProcessingAverageAnalysis.class); + @CommandLine.Option(names = "--no-runs", defaultValue = "5", description = "Number of simulation runs to be averaged.") + private Integer noRuns; + + private final Map> totalStats = new HashMap<>(); + private final Map> perLinkMStats = new HashMap<>(); + private final Map, List> gridPerDayStats = new HashMap<>(); + private final Map, List> gridPerHourStats = new HashMap<>(); + private final Map meanTotal = new HashMap<>(); + private final Map meanPerLinkM = new HashMap<>(); + private final Map, Double> meanGridPerDay = new HashMap<>(); + private final Map, Double> meanGridPerHour = new HashMap<>(); + + private final CsvOptions csv = new CsvOptions(); + private static final String VALUE = "value"; + private static final String LINK_ID = "linkId"; + private static final String POLLUTANT = "Pollutant"; + private static final String ANALYSIS_DIR = "/analysis/emissions"; + + public static void main(String[] args) { + new EmissionsPostProcessingAverageAnalysis().execute(args); + } + + @Override + public Integer call() throws Exception { + + String runs = input.getPath("runs"); + +// function to determine column types + Function columnTypeFunction = columnName -> { + if (columnName.equals(LINK_ID) || columnName.equals(POLLUTANT)) { + return ColumnType.STRING; + } else { + return ColumnType.DOUBLE; + } + }; + + List foldersSeeded = Arrays.stream(runs.split(",")).toList(); + +// add stats from every run to map + for (String folder : foldersSeeded) { + final Path analysisDir = Path.of(folder + ANALYSIS_DIR); + String totalCsv = globFile(analysisDir, "*emissions_total.csv*").toString(); + String emissionsPerLinkMCsv = globFile(analysisDir, "*emissions_per_link_per_m.csv*").toString(); + String emissionsGridPerDayCsv = globFile(analysisDir, "*emissions_grid_per_day.xyt.csv*").toString(); + String emissionsGridPerHourCsv = globFile(analysisDir, "*emissions_grid_per_hour.csv*").toString(); + + Table total = 
Table.read().csv(CsvReadOptions.builder(IOUtils.getBufferedReader(totalCsv)) + .columnTypes(columnTypeFunction) + .sample(false) + .separator(csv.detectDelimiter(totalCsv)).build()); + + Table emissionsPerLinkM = Table.read().csv(CsvReadOptions.builder(IOUtils.getBufferedReader(emissionsPerLinkMCsv)) + .columnTypes(columnTypeFunction) + .sample(false) + .separator(csv.detectDelimiter(emissionsPerLinkMCsv)).build()); + + Table emissionsGridPerDay = Table.read().csv(CsvReadOptions.builder(IOUtils.getBufferedReader(emissionsGridPerDayCsv)) + .columnTypes(columnTypeFunction) + .sample(false) + .separator(csv.detectDelimiter(emissionsGridPerDayCsv)).header(true).build()); + + Table emissionsGridPerHour = Table.read().csv(CsvReadOptions.builder(IOUtils.getBufferedReader(emissionsGridPerHourCsv)) + .columnTypes(columnTypeFunction) + .sample(false) + .separator(csv.detectDelimiter(emissionsGridPerHourCsv)).build()); + +// get all total stats + for (int i = 0; i < total.rowCount(); i++) { + Row row = total.row(i); + + if (!totalStats.containsKey(row.getString(POLLUTANT))) { + totalStats.put(row.getString(POLLUTANT), new ArrayList<>()); + } + totalStats.get(row.getString(POLLUTANT)).add(row.getDouble("kg")); + } + +// get all per link per m stats + for (int i = 0; i < emissionsPerLinkM.rowCount(); i++) { + Row row = emissionsPerLinkM.row(i); + Double[] values = new Double[emissionsPerLinkM.columnCount() - 1]; + +// iterate through columns. 
this file contains 23 params per link, as of may24 + for (int j = 1; j < emissionsPerLinkM.columnCount(); j++) { + if (!perLinkMStats.containsKey(row.getString(LINK_ID))) { + perLinkMStats.put(row.getString(LINK_ID), new ArrayList<>()); + } + values[j - 1] = row.getDouble(j); + } + perLinkMStats.get(row.getString(LINK_ID)).add(values); + } + +// get all grid per day stats + getGridData(emissionsGridPerDay, gridPerDayStats); +// get all grid per day stats + getGridData(emissionsGridPerHour, gridPerHourStats); + } + +// calc means for every map +// total means + for (Map.Entry> e : totalStats.entrySet()) { + AtomicReference sum = new AtomicReference<>(0.); + e.getValue().forEach(d -> sum.set(sum.get() + d)); + + meanTotal.put(e.getKey(), sum.get() / e.getValue().size()); + } + +// per linkM means + for (Map.Entry> e : perLinkMStats.entrySet()) { + + Double[] sums = new Double[e.getValue().get(0).length]; + + for (Double[] d : e.getValue()) { + for (int i = 0; i <= d.length - 1; i++) { +// initial array values are null + if (sums[i] == null) { + sums[i] = 0.; + } + sums[i] += d[i]; + } + } + + Double[] means = new Double[sums.length]; + for (int i = 0; i <= sums.length - 1; i++) { + means[i] = sums[i] / e.getValue().size(); + } + meanPerLinkM.put(e.getKey(), means); + } + +// grid per day means + calcGridMeans(gridPerDayStats, meanGridPerDay); +// grid per hour means + calcGridMeans(gridPerHourStats, meanGridPerHour); + +// write total mean stats + try (CSVPrinter printer = new CSVPrinter(Files.newBufferedWriter(output.getPath("mean_emissions_total.csv")), CSVFormat.DEFAULT)) { + printer.printRecord(POLLUTANT, "kg"); + + for (Map.Entry e : meanTotal.entrySet()) { + printer.printRecord("mean-" + e.getKey(), e.getValue()); + } + } + +// write per linkM mean stats + try (CSVPrinter printer = new CSVPrinter(Files.newBufferedWriter(output.getPath("mean_emissions_per_link_per_m.csv")), CSVFormat.DEFAULT)) { + printer.printRecord(LINK_ID, "CO [g/m]", "CO2_TOTAL [g/m]", "FC 
[g/m]", "HC [g/m]", "NMHC [g/m]", "NOx [g/m]", "NO2 [g/m]", "PM [g/m]", "SO2 [g/m]", + "FC_MJ [g/m]", "CO2_rep [g/m]", "CO2e [g/m]", "PM2_5 [g/m]", "PM2_5_non_exhaust [g/m]", "PM_non_exhaust [g/m]", "BC_exhaust [g/m]", "BC_non_exhaust [g/m]", + "Benzene [g/m]", "PN [g/m]", "Pb [g/m]", "CH4 [g/m]", "N2O [g/m]", "NH3 [g/m]" + ); + + for (Map.Entry e : meanPerLinkM.entrySet()) { + printer.printRecord(e.getKey(), e.getValue()[0], e.getValue()[1], e.getValue()[2], e.getValue()[3], e.getValue()[4], e.getValue()[5], + e.getValue()[6], e.getValue()[7], e.getValue()[8], e.getValue()[9], e.getValue()[10], e.getValue()[11], e.getValue()[12], e.getValue()[13], + e.getValue()[14], e.getValue()[15], e.getValue()[16], e.getValue()[17], e.getValue()[18], e.getValue()[19], e.getValue()[20], e.getValue()[21], + e.getValue()[22]); + } + } + +// write grid mean stats + writeGridFile("mean_emissions_grid_per_day.xyt.csv", meanGridPerDay); + writeGridFile("mean_emissions_grid_per_hour.csv", meanGridPerHour); + + return 0; + } + + private void calcGridMeans(Map, List> originMap, Map, Double> targetMap) { + for (Map.Entry, List> e : originMap.entrySet()) { + AtomicReference sum = new AtomicReference<>(0.); + e.getValue().forEach(d -> sum.set(sum.get() + d)); + + targetMap.put(e.getKey(), sum.get() / e.getValue().size()); + } + } + + private void getGridData(Table gridTable, Map, List> dataMap) { + for (int i = 0; i < gridTable.rowCount(); i++) { + Row row = gridTable.row(i); + Map.Entry entry = new AbstractMap.SimpleEntry<>(row.getDouble("time"), new Coord(row.getDouble("x"), row.getDouble("y"))); + + dataMap.computeIfAbsent(entry, key -> new ArrayList<>()); + dataMap.get(entry).add(row.getDouble(VALUE)); + } + } + + private void writeGridFile(String fileName, Map, Double> values) throws IOException { + try (CSVPrinter printer = new CSVPrinter(Files.newBufferedWriter(output.getPath(fileName)), CSVFormat.DEFAULT)) { + + printer.printRecord("# EPSG:25832"); + printer.printRecord("time", 
"x", "y", VALUE); + + for (Map.Entry, Double> e : values.entrySet()) { + printer.printRecord(e.getKey().getKey(), e.getKey().getValue().getX(), e.getKey().getValue().getY(), e.getValue()); + } + } + } +} + diff --git a/src/main/java/org/matsim/analysis/postAnalysis/GenerateKelheimAnalysisNetwork.java b/src/main/java/org/matsim/analysis/postAnalysis/GenerateKelheimAnalysisNetwork.java index 3cb3f69..8a7f5a9 100644 --- a/src/main/java/org/matsim/analysis/postAnalysis/GenerateKelheimAnalysisNetwork.java +++ b/src/main/java/org/matsim/analysis/postAnalysis/GenerateKelheimAnalysisNetwork.java @@ -24,7 +24,7 @@ import org.matsim.api.core.v01.network.Network; import org.matsim.api.core.v01.network.Node; import org.matsim.application.MATSimAppCommand; -import org.matsim.contrib.zone.ZonalSystems; +import org.matsim.contrib.common.zones.ZoneSystemUtils; import org.matsim.core.config.groups.NetworkConfigGroup; import org.matsim.core.network.NetworkUtils; import org.matsim.core.network.filter.NetworkFilterManager; @@ -62,7 +62,7 @@ public Integer call() throws Exception { Network network = NetworkUtils.readNetwork(networkFile); Set nodesWithinArea = new HashSet<>( - ZonalSystems.selectNodesWithinArea(network.getNodes().values(), ShpGeometryUtils.loadPreparedGeometries(IOUtils.resolveFileOrResource(shapeFile)))); + ZoneSystemUtils.selectNodesWithinArea(network.getNodes().values(), ShpGeometryUtils.loadPreparedGeometries(IOUtils.resolveFileOrResource(shapeFile)))); NetworkFilterManager networkFilterManager = new NetworkFilterManager(network, new NetworkConfigGroup()); networkFilterManager.addLinkFilter( diff --git a/src/main/java/org/matsim/analysis/postAnalysis/NoiseAverageAnalysis.java b/src/main/java/org/matsim/analysis/postAnalysis/NoiseAverageAnalysis.java new file mode 100644 index 0000000..dc4593f --- /dev/null +++ b/src/main/java/org/matsim/analysis/postAnalysis/NoiseAverageAnalysis.java @@ -0,0 +1,246 @@ +package org.matsim.analysis.postAnalysis; + +import 
org.apache.avro.file.CodecFactory; +import org.apache.avro.file.DataFileReader; +import org.apache.avro.file.DataFileWriter; +import org.apache.avro.file.FileReader; +import org.apache.avro.generic.GenericData; +import org.apache.avro.generic.GenericDatumReader; +import org.apache.avro.generic.GenericRecord; +import org.apache.avro.io.DatumWriter; +import org.apache.avro.specific.SpecificDatumWriter; +import org.apache.avro.util.Utf8; +import org.apache.commons.csv.CSVFormat; +import org.apache.commons.csv.CSVPrinter; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.matsim.application.CommandSpec; +import org.matsim.application.MATSimAppCommand; +import org.matsim.application.avro.XYTData; +import org.matsim.application.options.CsvOptions; +import org.matsim.application.options.InputOptions; +import org.matsim.application.options.OutputOptions; +import org.matsim.core.utils.io.IOUtils; +import picocli.CommandLine; +import tech.tablesaw.api.ColumnType; +import tech.tablesaw.api.Row; +import tech.tablesaw.api.Table; +import tech.tablesaw.io.csv.CsvReadOptions; + +import java.io.File; +import java.io.IOException; +import java.io.UncheckedIOException; +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.*; +import java.util.concurrent.atomic.AtomicReference; + +import static org.matsim.application.ApplicationUtils.globFile; + +@CommandLine.Command(name = "average-noise", description = "Calculates average noise stats based on several sim runs with different random seeds.") +@CommandSpec( + requires = {"runs"}, + produces = {"mean_emission_per_day.csv", "mean_immission_per_day.avro", "mean_immission_per_hour.avro"} +) +public class NoiseAverageAnalysis implements MATSimAppCommand { + private final Logger log = LogManager.getLogger(NoiseAverageAnalysis.class); + @CommandLine.Mixin + private InputOptions input = InputOptions.ofCommand(NoiseAverageAnalysis.class); + @CommandLine.Mixin + private 
OutputOptions output = OutputOptions.ofCommand(NoiseAverageAnalysis.class); + @CommandLine.Option(names = "--no-runs", defaultValue = "5", description = "Number of simulation runs to be averaged.") + private Integer noRuns; + + private final CsvOptions csv = new CsvOptions(); + private static final String ANALYSIS_DIR = "/analysis/noise"; + private static final String LINK_ID = "Link Id"; + private static final String VALUE = "value"; + private List imissionsPerDay = new ArrayList<>(); + private List imissionsPerHour = new ArrayList<>(); + private Map> emissionsPerDay = new HashMap<>(); + private Map meanEmissionsPerDay = new HashMap<>(); + + + public static void main(String[] args) { + new NoiseAverageAnalysis().execute(args); + } + + @Override + public Integer call() throws Exception { + String runs = input.getPath("runs"); + + List foldersSeeded = Arrays.stream(runs.split(",")).toList(); + + // add stats from every run to map + for (String folder : foldersSeeded) { + final Path analysisDir = Path.of(folder + ANALYSIS_DIR); + String emissionsCsv = globFile(analysisDir, "*emission_per_day.csv*").toString(); + String imissionsPerDayAvro = globFile(analysisDir, "*immission_per_day.avro*").toString(); + String imissionsPerHourAvro = globFile(analysisDir, "*immission_per_hour.avro*").toString(); + +// read + Table emissions = Table.read().csv(CsvReadOptions.builder(IOUtils.getBufferedReader(emissionsCsv)) + .columnTypesPartial(Map.of(LINK_ID, ColumnType.STRING, VALUE, ColumnType.DOUBLE)) + .sample(false) + .separator(csv.detectDelimiter(emissionsCsv)).build()); + +// read avro file + readAvroFile(imissionsPerDayAvro, imissionsPerDay); + readAvroFile(imissionsPerHourAvro, imissionsPerHour); + +// get all emission stats + for (int i = 0; i < emissions.rowCount(); i++) { + Row row = emissions.row(i); + + if (!emissionsPerDay.containsKey(row.getString(LINK_ID))) { + emissionsPerDay.put(row.getString(LINK_ID), new ArrayList<>()); + } + 
emissionsPerDay.get(row.getString(LINK_ID)).add(row.getDouble(VALUE)); + } + } + +// calc emission means and write to mean map + for (Map.Entry> e : emissionsPerDay.entrySet()) { + AtomicReference sum = new AtomicReference<>(0.); + e.getValue().forEach(d -> sum.set(sum.get() + d)); + + meanEmissionsPerDay.put(e.getKey(), sum.get() / e.getValue().size()); + } + +// calc avro means + XYTData imissionsPerDayMean = calcAvroMeans(imissionsPerDay); + XYTData imissionsPerHourMean = calcAvroMeans(imissionsPerHour); + + +// write emission mean stats + try (CSVPrinter printer = new CSVPrinter(Files.newBufferedWriter(output.getPath("mean_emission_per_day.csv")), CSVFormat.DEFAULT)) { + printer.printRecord(LINK_ID, VALUE); + + for (Map.Entry e : meanEmissionsPerDay.entrySet()) { + printer.printRecord(e.getKey(), e.getValue()); + } + } + +// write avro mean files + writeAvro(imissionsPerDayMean, new File(output.getPath("mean_immission_per_day.avro").toString())); + writeAvro(imissionsPerHourMean, new File(output.getPath("mean_immission_per_hour.avro").toString())); + + return 0; + } + + private void writeAvro(XYTData xytData, File outputFile) { + DatumWriter datumWriter = new SpecificDatumWriter<>(XYTData.class); + try (DataFileWriter dataFileWriter = new DataFileWriter<>(datumWriter)) { + dataFileWriter.setCodec(CodecFactory.deflateCodec(9)); + dataFileWriter.create(xytData.getSchema(), IOUtils.getOutputStream(IOUtils.getFileUrl(outputFile.toString()), false)); + dataFileWriter.append(xytData); + } catch (IOException e) { + throw new UncheckedIOException(e); + } + } + + private XYTData calcAvroMeans(List recordList) { + String crs = null; + List xCoords = new ArrayList<>(); + List yCoords = new ArrayList<>(); + List timeStamps = new ArrayList<>(); + Map> data = new HashMap<>(); + + for (GenericRecord genericRecord: recordList) { + + String object0 = genericRecord.get(0).toString(); + Object object1 = genericRecord.get(1); + Object object2 = genericRecord.get(2); + Object 
object3 = genericRecord.get(3); + Object object4 = genericRecord.get(4); + + if (crs == null) { + crs = object0; + } + + getCoordData(object1, xCoords); + getCoordData(object2, yCoords); + + +// TODO: for the example data even for the hourly data there was only one time stamp. This might be different with real data. this needs to be checked + if (object3 instanceof GenericData.Array) { + List ints = new ArrayList<>((GenericData.Array) object3); + + if (!timeStamps.equals(ints)) { + if (timeStamps.isEmpty()) { + timeStamps.addAll(ints); + } else { + log.error("List of time stamps from the different run seeds are not identical, this should not happen. Abort."); + throw new IllegalArgumentException(); + } + } + } + + // there should be only one key in the map + if (object4 instanceof HashMap) { + List values = new ArrayList<>(); + + for (Map.Entry entry : ((HashMap) object4).entrySet()) { + if (entry.getKey() instanceof Utf8 && entry.getKey().toString().equals("imissions") && entry.getValue() instanceof GenericData.Array) { + values.addAll((GenericData.Array) entry.getValue()); + + String entryString = ((Utf8) entry.getKey()).toString(); + + if (data.get(entryString) == null) { +// if map = list (which is its only value) is empty: set values as list (it is the first iteration of this for loop) + data.put(entryString, values); + } else { +// if there already is an entry in the map, take the values from list and update them + for (Float f : data.get(entryString)) { + data.get(entryString).set(data.get(entryString).indexOf(f), f + values.get(data.get(entryString).indexOf(f))); + } + } + } + } + } + } + +// calc mean values for each datapoint out of sums and number of records (1 record = 1 run seed) + data.entrySet() + .stream() + .filter(entry -> entry.getKey().equals("imissions")) + .forEach(entry -> entry.getValue() + .forEach(value -> entry.getValue().set(entry.getValue().indexOf(value), value / recordList.size()))); + + return new XYTData(crs, xCoords, yCoords, 
timeStamps, data); + } + + private void getCoordData(Object object, List target) { + if (object instanceof GenericData.Array) { + List floats = new ArrayList<>((GenericData.Array) object); + + if (!target.equals(floats)) { + if (target.isEmpty()) { + target.addAll(floats); + } else { + log.error("List of coords from the different run seeds are not identical, this should not happen. Abort."); + throw new IllegalArgumentException(); + } + } + } + } + + private void readAvroFile(String input, List target) { + try { + // Read the schema from the Avro file + FileReader fileReader = DataFileReader.openReader(new File(input), new GenericDatumReader<>()); + + // Print the schema + log.info("Reading .avro file from {} with schema {}.", input, fileReader.getSchema()); + + // Read records and save to list + while (fileReader.hasNext()) { + target.add(fileReader.next()); + } + + fileReader.close(); + } catch (IOException e) { + log.error(e); + } + } +} diff --git a/src/main/java/org/matsim/analysis/postAnalysis/TransformCoordinatesTripCSV.java b/src/main/java/org/matsim/analysis/postAnalysis/TransformCoordinatesTripCSV.java index 4f61c5f..d8244db 100644 --- a/src/main/java/org/matsim/analysis/postAnalysis/TransformCoordinatesTripCSV.java +++ b/src/main/java/org/matsim/analysis/postAnalysis/TransformCoordinatesTripCSV.java @@ -44,7 +44,9 @@ private TransformCoordinatesTripCSV(){} public static void main(String[] args) { String path = "C:/Users/Tilmann/tubCloud/VSP_WiMi/Projekte/KelRide/2023-10-results-exchange-VIA/AV-speed-mps-5/SAR2023-AV5/seed-5-SAR2023"; - Iterator files = FileUtils.iterateFiles(new File(path), new WildcardFileFilter(Arrays.asList("*trips*", "*legs*")), null); + WildcardFileFilter.Builder builder = WildcardFileFilter.builder().setWildcards(Arrays.asList("*trips*", "*legs*")); + + Iterator files = FileUtils.iterateFiles(new File(path), builder.get(), null); files.forEachRemaining(file -> process(file)); } @@ -58,13 +60,15 @@ private static void process(File 
input) { CoordinateTransformation transformer = TransformationFactory.getCoordinateTransformation("EPSG:25832", TransformationFactory.WGS84); + CSVFormat.Builder format = CSVFormat.DEFAULT.builder().setDelimiter(';'); + try { CSVParser reader = new CSVParser(IOUtils.getBufferedReader(input.getAbsolutePath()), - CSVFormat.DEFAULT.withDelimiter(';').withFirstRecordAsHeader()); + format.setHeader().setSkipHeaderRecord(true).build()); String[] header = reader.getHeaderNames().toArray(new String[0]); CSVPrinter writer = new CSVPrinter(IOUtils.getBufferedWriter(output), - CSVFormat.DEFAULT.withDelimiter(';').withHeader(header)); + format.setHeader(header).build()); Map headerMap = reader.getHeaderMap(); diff --git a/src/main/java/org/matsim/analysis/postAnalysis/drt/CalcEuclideanDistances.java b/src/main/java/org/matsim/analysis/postAnalysis/drt/CalcEuclideanDistances.java index 86d95b4..24fe033 100644 --- a/src/main/java/org/matsim/analysis/postAnalysis/drt/CalcEuclideanDistances.java +++ b/src/main/java/org/matsim/analysis/postAnalysis/drt/CalcEuclideanDistances.java @@ -53,10 +53,10 @@ public static void main(String[] args) { String output = input.toString().substring(0, input.toString().lastIndexOf('.')) + "_withDistance.csv"; CSVPrinter writer; - try (CSVParser parser = new CSVParser(Files.newBufferedReader(input), - CSVFormat.DEFAULT.withDelimiter(';').withFirstRecordAsHeader())) { + CSVFormat.Builder format = CSVFormat.DEFAULT.builder().setDelimiter(';'); + try (CSVParser parser = new CSVParser(Files.newBufferedReader(input), format.build())) { - writer = new CSVPrinter(new FileWriter(output), CSVFormat.DEFAULT.withDelimiter(';')); + writer = new CSVPrinter(new FileWriter(output), format.build()); for (CSVRecord row : parser.getRecords()) { diff --git a/src/main/java/org/matsim/analysis/postAnalysis/drt/DrtPostProcessingAverageAnalysis.java b/src/main/java/org/matsim/analysis/postAnalysis/drt/DrtPostProcessingAverageAnalysis.java new file mode 100644 index 
0000000..035d0c2 --- /dev/null +++ b/src/main/java/org/matsim/analysis/postAnalysis/drt/DrtPostProcessingAverageAnalysis.java @@ -0,0 +1,201 @@ +package org.matsim.analysis.postAnalysis.drt; + +import org.apache.commons.csv.CSVFormat; +import org.apache.commons.csv.CSVPrinter; +import org.matsim.application.CommandSpec; +import org.matsim.application.MATSimAppCommand; +import org.matsim.application.options.CsvOptions; +import org.matsim.application.options.InputOptions; +import org.matsim.application.options.OutputOptions; +import org.matsim.core.utils.io.IOUtils; +import picocli.CommandLine; +import tech.tablesaw.api.ColumnType; +import tech.tablesaw.api.Row; +import tech.tablesaw.api.Table; +import tech.tablesaw.io.csv.CsvReadOptions; + +import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.Path; +import java.time.LocalTime; +import java.util.*; + +import static org.matsim.application.ApplicationUtils.globFile; + +@CommandLine.Command(name = "average-drt", description = "Calculates average drt stats based on several sim runs with different random seeds.") +@CommandSpec( + requires = {"runs", "mode"}, + produces = {"rides_per_veh_avg_demand_stats.csv", "avg_wait_time_avg_demand_stats.csv", "requests_avg_demand_stats.csv", "avg_total_travel_time_avg_demand_stats.csv", + "rides_avg_demand_stats.csv", "avg_direct_distance_[km]_avg_demand_stats.csv", "rejections_avg_demand_stats.csv", "95th_percentile_wait_time_avg_demand_stats.csv", + "avg_in-vehicle_time_avg_demand_stats.csv", "avg_ride_distance_[km]_avg_demand_stats.csv", "rejection_rate_avg_demand_stats.csv", + "avg_fare_[MoneyUnit]_avg_demand_stats.csv", "total_service_hours_avg_supply_stats.csv", "pooling_ratio_avg_supply_stats.csv", "detour_ratio_avg_supply_stats.csv", + "total_vehicle_mileage_[km]_avg_supply_stats.csv", "empty_ratio_avg_supply_stats.csv", "number_of_stops_avg_supply_stats.csv", "total_pax_distance_[km]_avg_supply_stats.csv", "vehicles_avg_supply_stats.csv"} +) +public 
class DrtPostProcessingAverageAnalysis implements MATSimAppCommand { + + @CommandLine.Mixin + private InputOptions input = InputOptions.ofCommand(DrtPostProcessingAverageAnalysis.class); + @CommandLine.Mixin + private OutputOptions output = OutputOptions.ofCommand(DrtPostProcessingAverageAnalysis.class); + @CommandLine.Option(names = "--no-runs", defaultValue = "5", description = "Number of simulation runs to be averaged.") + private Integer noRuns; + + private final Map> demandStats = new HashMap<>(); + private final Map> supplyStats = new HashMap<>(); + private final Map demandAvgs = new HashMap<>(); + private final Map supplyAvgs = new HashMap<>(); + Map> params = new HashMap<>(); + + private final CsvOptions csv = new CsvOptions(); + + String supplyInfo = "info[titleCase]"; + String value = "value"; + + public static void main(String[] args) { + new DrtPostProcessingAverageAnalysis().execute(args); + } + + @Override + public Integer call() throws Exception { + + String runs = input.getPath("runs"); + + List foldersSeeded = Arrays.stream(runs.split(",")).toList(); + +// add stats from every run to map + for (String folder : foldersSeeded) { + + String demandKpiCsv = globFile(Path.of(folder + "/analysis/" + input.getPath("mode")), "*demand_kpi.csv*").toString(); + String supplyKpiCsv = globFile(Path.of(folder + "/analysis/" + input.getPath("mode")), "*supply_kpi.csv*").toString(); + + Table demand = Table.read().csv(CsvReadOptions.builder(IOUtils.getBufferedReader(demandKpiCsv)) + .sample(false) + .separator(csv.detectDelimiter(demandKpiCsv)).build()); + + Table supply = Table.read().csv(CsvReadOptions.builder(IOUtils.getBufferedReader(supplyKpiCsv)) + .sample(false) + .separator(csv.detectDelimiter(supplyKpiCsv)).build()); + +// get all demand stats + for (int i = 0; i < demand.rowCount(); i++) { + Row row = demand.row(i); + + if (!demandStats.containsKey(row.getString("Info"))) { + demandStats.put(row.getString("Info"), new ArrayList<>()); + } + +// some values 
are in format hh:mm:ss or empty + if (row.getString(value).isEmpty()) { + demandStats.get(row.getString("Info")).add(0.); + } else if (row.getString(value).contains(":")) { + demandStats.get(row.getString("Info")).add((double) LocalTime.parse(row.getString(value)).toSecondOfDay()); + } else { + demandStats.get(row.getString("Info")).add(Double.parseDouble(row.getString(value))); + } + } + +// get all supply stats + for (int i = 0; i < supply.rowCount(); i++) { + Row row = supply.row(i); + + if (!supplyStats.containsKey(row.getString(supplyInfo))) { + supplyStats.put(row.getString(supplyInfo), new ArrayList<>()); + } + + if (row.getColumnType(value) == ColumnType.INTEGER) { + supplyStats.get(row.getString(supplyInfo)).add((double) row.getInt(value)); + } else { + supplyStats.get(row.getString(supplyInfo)).add(row.getDouble(value)); + } + } + } + + fillAvgMap(demandStats, demandAvgs); + fillAvgMap(supplyStats, supplyAvgs); + + params.put("avg_demand_stats.csv", List.of("rides_per_veh", "avg_wait_time", "requests", "avg_total_travel_time", "rides", "avg_direct_distance_[km]", + "rejections", "95th_percentile_wait_time", "avg_in-vehicle_time", "avg_ride_distance_[km]", "rejection_rate", "avg_fare_[MoneyUnit]")); + params.put("avg_supply_stats.csv", List.of("total_service_hours", "pooling_ratio", "detour_ratio", "total_vehicle_mileage_[km]", "empty_ratio", "number_of_stops", + "total_pax_distance_[km]", "vehicles")); + + for (Map.Entry> e : params.entrySet()) { + for (String param : params.get(e.getKey())) { + if (e.getKey().contains("demand")) { + writeFile(e.getKey(), demandAvgs, param); + } else { + writeFile(e.getKey(), supplyAvgs, param); + } + } + } + + return 0; + } + + private void writeFile(String fileName, Map values, String param) throws IOException { + try (CSVPrinter printer = new CSVPrinter(Files.newBufferedWriter(output.getPath(param + "_" + fileName)), CSVFormat.DEFAULT)) { + + printer.printRecord("info", value); + + for (Map.Entry e : values.entrySet()) 
{ + String transformed = e.getKey().toLowerCase().replace(".", "").replace(" ", "_"); + if (transformed.contains(param)) { + printer.printRecord("mean-" + e.getKey(), e.getValue()[0]); + printer.printRecord("median-" + e.getKey(), e.getValue()[1]); + printer.printRecord("sd-" + e.getKey(), e.getValue()[2]); + printer.printRecord("min-" + e.getKey(), e.getValue()[3]); + printer.printRecord("max-" + e.getKey(), e.getValue()[4]); + } + } + } + } + + private void fillAvgMap(Map> source, Map destination) { + for (Map.Entry> e: source.entrySet()) { + + String key = e.getKey(); + Double[] values = new Double[5]; + + double sum = 0.; + + for (double d : source.get(key)) { + sum += d; + } + double mean = sum / source.get(key).size(); + + values[0] = mean; + values[1] = calcMedian(source.get(key)); + values[2] = calcStandardDeviation(source.get(key), mean); + values[3] = Collections.min(source.get(key)); + values[4] = Collections.max(source.get(key)); + + destination.put(key, values); + } + } + + private Double calcStandardDeviation(List values, double mean) { + + double sumSquaredDiff = 0; + for (double num : values) { + sumSquaredDiff += Math.pow(num - mean, 2); + } + + return Math.sqrt(sumSquaredDiff / values.size()); + } + + private Double calcMedian(List values) { +// sort a defensive copy: sorting the argument in place would mutate the stats lists +// held in demandStats/supplyStats as a hidden side effect for later readers + List sorted = new ArrayList<>(values); + Collections.sort(sorted); + + int length = sorted.size(); + // Check if the length of the array is odd or even + if (length % 2 != 0) { + // If odd, return the middle element + return sorted.get(length / 2); + } else { + // If even, return the average of the two middle elements + int midIndex1 = length / 2 - 1; + int midIndex2 = length / 2; + return (sorted.get(midIndex1) + sorted.get(midIndex2)) / 2.0; + } + } +} + diff --git a/src/main/java/org/matsim/analysis/postAnalysis/drt/DrtServiceQualityAnalysis.java b/src/main/java/org/matsim/analysis/postAnalysis/drt/DrtServiceQualityAnalysis.java index 061685e..b1027c2 100644 --- a/src/main/java/org/matsim/analysis/postAnalysis/drt/DrtServiceQualityAnalysis.java 
+++ b/src/main/java/org/matsim/analysis/postAnalysis/drt/DrtServiceQualityAnalysis.java @@ -28,8 +28,8 @@ import org.matsim.core.router.util.LeastCostPathCalculator; import org.matsim.core.router.util.TravelDisutility; import org.matsim.core.router.util.TravelTime; -import org.matsim.core.utils.gis.ShapeFileReader; -import org.matsim.core.utils.gis.ShapeFileWriter; +import org.matsim.core.utils.gis.GeoFileReader; +import org.matsim.core.utils.gis.GeoFileWriter; import org.matsim.core.utils.io.IOUtils; import org.matsim.utils.gis.shp2matsim.ShpGeometryUtils; import org.matsim.vehicles.Vehicle; @@ -115,7 +115,7 @@ public Integer call() throws Exception { List allWaitingTimes = new ArrayList<>(); Map> shpWaitingTimes = null; - Set shpFeatures = new HashSet<>(ShapeFileReader.getAllFeatures(SHPFILE)); + Set shpFeatures = new HashSet<>(GeoFileReader.getAllFeatures(SHPFILE)); for (SimpleFeature shpFeature : shpFeatures) { shpFeature.setAttribute(FEATURE_ORIGINS_ATTRIBUTE_NAME, 0.d); shpFeature.setAttribute(FEATURE_DESTINATIONS_ATTRIBUTE_NAME, 0.d); @@ -140,8 +140,8 @@ public Integer call() throws Exception { } int numOfTrips = 0; - try (CSVParser parser = new CSVParser(Files.newBufferedReader(tripsFile), - CSVFormat.DEFAULT.withDelimiter(';').withFirstRecordAsHeader())) { + CSVFormat.Builder format = CSVFormat.DEFAULT.builder().setDelimiter(';').setHeader().setSkipHeaderRecord(true); + try (CSVParser parser = new CSVParser(Files.newBufferedReader(tripsFile), format.build())) { for (CSVRecord row : parser.getRecords()) { double waitingTime = Double.parseDouble(row.get(9)); @@ -262,7 +262,7 @@ public Integer call() throws Exception { feature.setAttribute(FEATURE_95PCT_WAIT_ATTRIBUTE_NAME, StatUtils.percentile(waitingTimes.stream().mapToDouble(t -> t).toArray(), 95)); } ); - ShapeFileWriter.writeGeometries(shpWaitingTimes.keySet(), outputFolder + "/" + mode + "_serviceZones_waitStats.shp"); + GeoFileWriter.writeGeometries(shpWaitingTimes.keySet(), outputFolder + "/" + mode + 
"_serviceZones_waitStats.shp"); } return 0; } diff --git a/src/main/java/org/matsim/analysis/postAnalysis/scoring/OutputPlansScoringAnalysis.java b/src/main/java/org/matsim/analysis/postAnalysis/scoring/OutputPlansScoringAnalysis.java index d862e5f..4205e73 100644 --- a/src/main/java/org/matsim/analysis/postAnalysis/scoring/OutputPlansScoringAnalysis.java +++ b/src/main/java/org/matsim/analysis/postAnalysis/scoring/OutputPlansScoringAnalysis.java @@ -59,8 +59,9 @@ public Integer call() throws Exception { List> relevantPersons = new ArrayList<>(); if (!relevantPersonsFile.equals("")) { List relevantPersonsId = new ArrayList<>(); - try (CSVParser parser = new CSVParser(Files.newBufferedReader(Path.of(relevantPersonsFile)), - CSVFormat.DEFAULT.withDelimiter(',').withFirstRecordAsHeader())) { + CSVFormat.Builder format = CSVFormat.DEFAULT.builder().setDelimiter(',').setHeader().setSkipHeaderRecord(true); + + try (CSVParser parser = new CSVParser(Files.newBufferedReader(Path.of(relevantPersonsFile)), format.build())) { for (CSVRecord row : parser) { relevantPersonsId.add(row.get(0)); } diff --git a/src/main/java/org/matsim/analysis/preAnalysis/ModeShareAnalysis.java b/src/main/java/org/matsim/analysis/preAnalysis/ModeShareAnalysis.java index fa5d9b6..9991437 100644 --- a/src/main/java/org/matsim/analysis/preAnalysis/ModeShareAnalysis.java +++ b/src/main/java/org/matsim/analysis/preAnalysis/ModeShareAnalysis.java @@ -66,8 +66,9 @@ public Integer call() throws Exception { if (!relevantPersonsFile.equals("")) { List relevantPersonsId = new ArrayList<>(); - try (CSVParser parser = new CSVParser(Files.newBufferedReader(Path.of(relevantPersonsFile)), - CSVFormat.DEFAULT.withDelimiter(',').withFirstRecordAsHeader())) { + CSVFormat.Builder format = CSVFormat.DEFAULT.builder().setDelimiter(',').setHeader().setSkipHeaderRecord(true); + + try (CSVParser parser = new CSVParser(Files.newBufferedReader(Path.of(relevantPersonsFile)), format.build())) { for (CSVRecord row : parser) { 
relevantPersonsId.add(row.get(0)); } diff --git a/src/main/java/org/matsim/dashboard/AverageDrtDashboard.java b/src/main/java/org/matsim/dashboard/AverageDrtDashboard.java new file mode 100644 index 0000000..df880d8 --- /dev/null +++ b/src/main/java/org/matsim/dashboard/AverageDrtDashboard.java @@ -0,0 +1,202 @@ +package org.matsim.dashboard; + + +import org.matsim.analysis.postAnalysis.drt.DrtPostProcessingAverageAnalysis; +import org.matsim.simwrapper.Dashboard; +import org.matsim.simwrapper.Data; +import org.matsim.simwrapper.Header; +import org.matsim.simwrapper.Layout; +import org.matsim.simwrapper.viz.*; + +import java.util.ArrayList; +import java.util.List; + +/** + * Average DRT dashboard for several runs with the same config but a different random seed. Dashboard for one specific DRT service. + */ +public class AverageDrtDashboard implements Dashboard { + private final List dirs; + private final String mode; + private final Integer noRuns; + + public AverageDrtDashboard(List dirs, String mode, Integer noRuns) { + this.dirs = dirs; + this.mode = mode; + this.noRuns = noRuns; + } + + private String postProcess(Data data, String outputFile) { +// args for analysis have to be: list of paths to run dirs + drt modes / folder names + List args = new ArrayList<>(List.of("--input-runs", String.join(",", dirs), "--input-mode", mode, + "--no-runs", noRuns.toString())); + + return data.compute(DrtPostProcessingAverageAnalysis.class, outputFile, args.toArray(new String[0])); + } + + @Override + public void configure(Header header, Layout layout) { + header.title = mode; + header.description = "Overview for the demand-responsive mode '" + mode + "'. This dashboard shows average values for " + noRuns + + " simulation runs. 
For the results of the specific runs please choose the according directory next to this dashboard.yaml."; + +// DEMAND + layout.row("one") + .el(Table.class, (viz, data) -> { + viz.title = "Rides per vehicle"; + viz.description = "Final demand statistics and KPI."; + viz.dataset = postProcess(data, "rides_per_veh_avg_demand_stats.csv"); + viz.showAllRows = true; + viz.width = 1.; + }) + .el(Table.class, (viz, data) -> { + viz.title = "Avg wait time"; + viz.description = "Final demand statistics and KPI."; + viz.dataset = postProcess(data, "avg_wait_time_avg_demand_stats.csv"); + viz.showAllRows = true; + viz.width = 1.; + }) + .el(Table.class, (viz, data) -> { + viz.title = "Requests"; + viz.description = "Final demand statistics and KPI."; + viz.dataset = postProcess(data, "requests_avg_demand_stats.csv"); + viz.showAllRows = true; + viz.width = 1.; + }); + + layout.row("two") + .el(Table.class, (viz, data) -> { + viz.title = "Avg total travel time"; + viz.description = "Final demand statistics and KPI."; + viz.dataset = postProcess(data, "avg_total_travel_time_avg_demand_stats.csv"); + viz.showAllRows = true; + viz.width = 1.; + }) + .el(Table.class, (viz, data) -> { + viz.title = "Rides"; + viz.description = "Final demand statistics and KPI."; + viz.dataset = postProcess(data, "rides_avg_demand_stats.csv"); + viz.showAllRows = true; + viz.width = 1.; + }) + .el(Table.class, (viz, data) -> { + viz.title = "Avg direct distance [km]"; + viz.description = "Final demand statistics and KPI."; + viz.dataset = postProcess(data, "avg_direct_distance_[km]_avg_demand_stats.csv"); + viz.showAllRows = true; + viz.width = 1.; + }); + + layout.row("three") + .el(Table.class, (viz, data) -> { + viz.title = "Rejections"; + viz.description = "Final demand statistics and KPI."; + viz.dataset = postProcess(data, "rejections_avg_demand_stats.csv"); + viz.showAllRows = true; + viz.width = 1.; + }) + .el(Table.class, (viz, data) -> { + viz.title = "95th percentile wait time"; + 
viz.description = "Final demand statistics and KPI."; + viz.dataset = postProcess(data, "95th_percentile_wait_time_avg_demand_stats.csv"); + viz.showAllRows = true; + viz.width = 1.; + }) + .el(Table.class, (viz, data) -> { + viz.title = "Avg in-vehicle time"; + viz.description = "Final demand statistics and KPI."; + viz.dataset = postProcess(data, "avg_in-vehicle_time_avg_demand_stats.csv"); + viz.showAllRows = true; + viz.width = 1.; + }); + + layout.row("four") + .el(Table.class, (viz, data) -> { + viz.title = "Avg ride distance [km]"; + viz.description = "Final demand statistics and KPI."; + viz.dataset = postProcess(data, "avg_ride_distance_[km]_avg_demand_stats.csv"); + viz.showAllRows = true; + viz.width = 1.; + }) + .el(Table.class, (viz, data) -> { + viz.title = "Rejection rate"; + viz.description = "Final demand statistics and KPI."; + viz.dataset = postProcess(data, "rejection_rate_avg_demand_stats.csv"); + viz.showAllRows = true; + viz.width = 1.; + }) + .el(Table.class, (viz, data) -> { + viz.title = "Avg fare [MoneyUnit]"; + viz.description = "Final demand statistics and KPI."; + viz.dataset = postProcess(data, "avg_fare_[MoneyUnit]_avg_demand_stats.csv"); + viz.showAllRows = true; + viz.width = 1.; + }); + +// SUPPLY + supplyTabs(layout); + } + + private void supplyTabs(Layout layout) { + layout.row("six") + .el(Table.class, (viz, data) -> { + viz.title = "Total service hours"; + viz.description = "Final configuration and service KPI."; + viz.dataset = postProcess(data, "total_service_hours_avg_supply_stats.csv"); + viz.showAllRows = true; + viz.width = 1.; + }) + .el(Table.class, (viz, data) -> { + viz.title = "Pooling ratio"; + viz.description = "Final configuration and service KPI."; + viz.dataset = postProcess(data, "pooling_ratio_avg_supply_stats.csv"); + viz.showAllRows = true; + viz.width = 1.; + }) + .el(Table.class, (viz, data) -> { + viz.title = "Detour ratio"; + viz.description = "Final configuration and service KPI."; + viz.dataset = 
postProcess(data, "detour_ratio_avg_supply_stats.csv"); + viz.showAllRows = true; + viz.width = 1.; + }); + + layout.row("seven") + .el(Table.class, (viz, data) -> { + viz.title = "Total vehicle mileage [km]"; + viz.description = "Final configuration and service KPI."; + viz.dataset = postProcess(data, "total_vehicle_mileage_[km]_avg_supply_stats.csv"); + viz.showAllRows = true; + viz.width = 1.; + }) + .el(Table.class, (viz, data) -> { + viz.title = "Empty ratio"; + viz.description = "Final configuration and service KPI."; + viz.dataset = postProcess(data, "empty_ratio_avg_supply_stats.csv"); + viz.showAllRows = true; + viz.width = 1.; + }) + .el(Table.class, (viz, data) -> { + viz.title = "Number of stops"; + viz.description = "Final configuration and service KPI."; + viz.dataset = postProcess(data, "number_of_stops_avg_supply_stats.csv"); + viz.showAllRows = true; + viz.width = 1.; + }); + + layout.row("eight") + .el(Table.class, (viz, data) -> { + viz.title = "Total pax distance [km]"; + viz.description = "Final configuration and service KPI."; + viz.dataset = postProcess(data, "total_pax_distance_[km]_avg_supply_stats.csv"); + viz.showAllRows = true; + viz.width = 1.; + }) + .el(Table.class, (viz, data) -> { + viz.title = "Vehicles"; + viz.description = "Final configuration and service KPI."; + viz.dataset = postProcess(data, "vehicles_avg_supply_stats.csv"); + viz.showAllRows = true; + viz.width = 1.; + }); + } +} diff --git a/src/main/java/org/matsim/dashboard/AverageKelheimEmissionsDashboard.java b/src/main/java/org/matsim/dashboard/AverageKelheimEmissionsDashboard.java new file mode 100644 index 0000000..526e24b --- /dev/null +++ b/src/main/java/org/matsim/dashboard/AverageKelheimEmissionsDashboard.java @@ -0,0 +1,118 @@ +/* *********************************************************************** * + * project: org.matsim.* + * Controler.java + * * + * *********************************************************************** * + * * + * copyright : (C) 2007 
by the members listed in the COPYING, * + * LICENSE and WARRANTY file. * + * email : info at matsim dot org * + * * + * *********************************************************************** * + * * + * This program is free software; you can redistribute it and/or modify * + * it under the terms of the GNU General Public License as published by * + * the Free Software Foundation; either version 2 of the License, or * + * (at your option) any later version. * + * See also COPYING, LICENSE and WARRANTY file * + * * + * *********************************************************************** */ + +package org.matsim.dashboard; + +import org.matsim.analysis.postAnalysis.EmissionsPostProcessingAverageAnalysis; +import org.matsim.simwrapper.Dashboard; +import org.matsim.simwrapper.Data; +import org.matsim.simwrapper.Header; +import org.matsim.simwrapper.Layout; +import org.matsim.simwrapper.viz.GridMap; +import org.matsim.simwrapper.viz.Links; +import org.matsim.simwrapper.viz.Table; + +import java.nio.file.Path; +import java.util.ArrayList; +import java.util.List; + +/** + * Average emissions dashboard for several runs with the same config but a different random seed. 
+ */ +public class AverageKelheimEmissionsDashboard implements Dashboard { + private final List dirs; + private final Integer noRuns; + private final String pathToCsvBase; + + public AverageKelheimEmissionsDashboard(List dirs, Integer noRuns) { + this.dirs = dirs; + this.noRuns = noRuns; + this.pathToCsvBase = null; + } + + public AverageKelheimEmissionsDashboard(List dirs, Integer noRuns, String pathToBaseRun) { + this.dirs = dirs; + this.noRuns = noRuns; + + if (!pathToBaseRun.endsWith("/")) { + pathToBaseRun += "/"; + } + this.pathToCsvBase = pathToBaseRun + "analysis/emissions/emissions_per_link_per_m.csv"; + } + + private String postProcess(Data data, String outputFile) { +// args for analysis have to be: list of paths to run dirs + drt modes / folder names + List args = new ArrayList<>(List.of("--input-runs", String.join(",", dirs), "--no-runs", noRuns.toString())); + + return data.compute(EmissionsPostProcessingAverageAnalysis.class, outputFile, args.toArray(new String[0])); + } + + /** + * Produces the dashboard. + */ + public void configure(Header header, Layout layout) { + header.title = "Average Emissions"; + header.description = "Shows the average emissions footprint and spatial distribution for several simulation runs."; + + String linkDescription = "Displays the emissions for each link per meter. Be aware that emission values are provided in the simulation sample size!"; + if (pathToCsvBase != null){ + linkDescription += String.format("%n Base is %s", pathToCsvBase); + } + String finalLinkDescription = linkDescription; + + layout.row("links") + .el(Table.class, (viz, data) -> { + viz.title = "Emissions"; + viz.description = "by pollutant. 
Total values are scaled from the simulation sample size to 100%."; + viz.dataset = postProcess(data, "mean_emissions_total.csv"); + viz.enableFilter = false; + viz.showAllRows = true; + viz.width = 1.0; + }) + .el(Links.class, (viz, data) -> { + viz.title = "Emissions per Link per Meter"; + viz.description = finalLinkDescription; + viz.height = 12.0; + viz.datasets.csvFile = postProcess(data, "mean_emissions_per_link_per_m.csv"); + viz.datasets.csvBase = Path.of(this.dirs.get(0)).getParent().relativize(Path.of(pathToCsvBase)).toString(); + viz.network = new CreateAverageDashboards().copyGeoJsonNetwork(dirs); + viz.display.color.columnName = "CO2_TOTAL [g/m]"; + viz.display.color.dataset = "csvFile"; + viz.display.width.scaleFactor = 100; + viz.display.width.columnName = "CO2_TOTAL [g/m]"; + viz.display.width.dataset = "csvFile"; + viz.center = data.context().getCenter(); + viz.width = 3.0; + }); + layout.row("second").el(GridMap.class, (viz, data) -> { + viz.title = "CO₂ Emissions"; + viz.description = "per day. Be aware that CO2 values are provided in the simulation sample size!"; + viz.height = 12.0; + viz.file = postProcess(data, "mean_emissions_grid_per_day.xyt.csv"); + }); + layout.row("third") + .el(GridMap.class, (viz, data) -> { + viz.title = "CO₂ Emissions"; + viz.description = "per hour. 
Be aware that CO2 values are provided in the simulation sample size!"; + viz.height = 12.; + viz.file = postProcess(data, "mean_emissions_grid_per_hour.csv"); + }); + } +} diff --git a/src/main/java/org/matsim/dashboard/AverageKelheimNoiseDashboard.java b/src/main/java/org/matsim/dashboard/AverageKelheimNoiseDashboard.java new file mode 100644 index 0000000..cb83ae7 --- /dev/null +++ b/src/main/java/org/matsim/dashboard/AverageKelheimNoiseDashboard.java @@ -0,0 +1,95 @@ +package org.matsim.dashboard; + +import org.matsim.analysis.postAnalysis.NoiseAverageAnalysis; +import org.matsim.simwrapper.Dashboard; +import org.matsim.simwrapper.Data; +import org.matsim.simwrapper.Header; +import org.matsim.simwrapper.Layout; +import org.matsim.simwrapper.viz.ColorScheme; +import org.matsim.simwrapper.viz.GridMap; +import org.matsim.simwrapper.viz.MapPlot; + +import java.util.ArrayList; +import java.util.List; + +/** + * Shows emission in the scenario. + */ +public class AverageKelheimNoiseDashboard implements Dashboard { + + private double minDb = 40; + private double maxDb = 80; + private final List dirs; + private final Integer noRuns; + private static final String NOISE = "noise"; + private static final String DARK_BLUE = "#1175b3"; + private static final String LIGHT_BLUE = "#95c7df"; + private static final String ORANGE = "#f4a986"; + private static final String RED = "#cc0c27"; + + public AverageKelheimNoiseDashboard(List dirs, Integer noRuns) { + this.dirs = dirs; + this.noRuns = noRuns; + } + + private String postProcess(Data data, String outputFile) { +// args for analysis have to be: list of paths to run dirs + drt modes / folder names + List args = new ArrayList<>(List.of("--input-runs", String.join(",", dirs), "--no-runs", noRuns.toString())); + + return data.compute(NoiseAverageAnalysis.class, outputFile, args.toArray(new String[0])); + } + + @Override + public void configure(Header header, Layout layout) { + + header.title = "Average Noise"; + header.description 
= "Shows the average noise footprint and spatial distribution for several simulation runs."; + + layout.row("aggregate noise") + .el(GridMap.class, (viz, data) -> { + viz.title = "Noise Immissions (Grid)"; + viz.description = "Aggregate Noise Immissions per day"; + viz.height = 12.0; + viz.cellSize = 250; + viz.opacity = 0.2; + viz.maxHeight = 20; + viz.center = data.context().getCenter(); + viz.zoom = data.context().mapZoomLevel; + viz.setColorRamp(new double[]{40, 50, 60}, new String[]{DARK_BLUE, LIGHT_BLUE, ORANGE, RED}); + viz.file = postProcess(data, "mean_immission_per_day.avro"); + }) + .el(MapPlot.class, (viz, data) -> { + viz.title = "Noise Emissions (Link)"; + viz.description = "Aggregate Noise Emissions per day"; + viz.height = 12.0; + viz.center = data.context().getCenter(); + viz.zoom = data.context().mapZoomLevel; + viz.minValue = minDb; + viz.maxValue = maxDb; + viz.setShape(new CreateAverageDashboards().copyGeoJsonNetwork(dirs)); + viz.addDataset(NOISE, postProcess(data, "mean_emission_per_day.csv")); + viz.display.lineColor.dataset = NOISE; + viz.display.lineColor.columnName = "value"; + viz.display.lineColor.join = "Link Id"; + viz.display.lineColor.fixedColors = new String[]{DARK_BLUE, LIGHT_BLUE, ORANGE, RED}; + viz.display.lineColor.setColorRamp(ColorScheme.RdYlBu, 4, true, "45, 55, 65"); + viz.display.lineWidth.dataset = NOISE; + viz.display.lineWidth.columnName = "value"; + viz.display.lineWidth.scaleFactor = 8d; + viz.display.lineWidth.join = "Link Id"; + }); + layout.row("hourly noise") + .el(GridMap.class, (viz, data) -> { + viz.title = "Hourly Noise Immissions (Grid)"; + viz.description = "Noise Immissions per hour"; + viz.height = 12.0; + viz.cellSize = 250; + viz.opacity = 0.2; + viz.maxHeight = 20; + viz.center = data.context().getCenter(); + viz.zoom = data.context().mapZoomLevel; + viz.setColorRamp(new double[]{40, 50, 60}, new String[]{DARK_BLUE, LIGHT_BLUE, ORANGE, RED}); + viz.file = postProcess(data, 
"mean_immission_per_hour.avro"); + }); + } +} diff --git a/src/main/java/org/matsim/dashboard/CreateAverageDashboards.java b/src/main/java/org/matsim/dashboard/CreateAverageDashboards.java new file mode 100644 index 0000000..d4c1f9e --- /dev/null +++ b/src/main/java/org/matsim/dashboard/CreateAverageDashboards.java @@ -0,0 +1,104 @@ +package org.matsim.dashboard; + +import org.matsim.api.core.v01.TransportMode; +import org.matsim.application.MATSimAppCommand; +import org.matsim.simwrapper.Dashboard; +import org.matsim.simwrapper.SimWrapper; +import picocli.CommandLine; + +import java.io.File; +import java.io.IOException; +import java.io.UncheckedIOException; +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; +import java.util.Objects; + +/** + * class to create average dashboards and run the necessary analysis for that. + */ +public class CreateAverageDashboards implements MATSimAppCommand { + @CommandLine.Option(names = "--input-path", required = true, description = "Path to directory with run directories.") + private String inputPath; + @CommandLine.Option(names = "--no-runs", defaultValue = "5", description = "Number of simulation runs to be averaged.") + private Integer noRuns; + @CommandLine.Option(names = "--base-run", description = "Path to directory base run.", defaultValue = "/net/ils/matsim-kelheim/v3.0-release/output-base/25pct") + private String pathToBaseRun; + + public static void main(String[] args) { + new CreateAverageDashboards().execute(args); + } + + CreateAverageDashboards() { + + } + + + + @Override + public Integer call() throws Exception { + // Collect all folder names + File[] foldersList = new File(inputPath).listFiles(); + List foldersSeeded = new ArrayList<>(); + + String analysisDir = ""; + + for (File folder : Objects.requireNonNull(foldersList)) { + if (!folder.isDirectory() || !folder.getAbsolutePath().contains("seed")) continue; + + String absPath = 
folder.getAbsolutePath(); + + foldersSeeded.add(absPath); + + if (analysisDir.isEmpty()) { + analysisDir = absPath + "/analysis"; + } + } + +// get drt modes for different dashboards from analysis folder of one run + List modes = new ArrayList<>(); + + Arrays.stream(new File(analysisDir).listFiles()) + .filter(d -> d.getAbsolutePath().contains(TransportMode.drt)) +// File.getName() yields the last path component on any OS; the previous +// substring(lastIndexOf("\\") + 1) only worked with Windows separators and +// returned the whole absolute path on Linux (cf. the default /net/ils/... base-run path) + .forEach(f -> modes.add(f.getName())); + + SimWrapper sw = SimWrapper.create(); + + for (String m : modes) { + Dashboard.Customizable d = Dashboard.customize(new AverageDrtDashboard(foldersSeeded, m, noRuns)) + .context(m); + + sw.addDashboard(d); + } + + sw.addDashboard(Dashboard.customize(new AverageKelheimEmissionsDashboard(foldersSeeded, noRuns, pathToBaseRun)).context("emissions")); + sw.addDashboard(Dashboard.customize(new AverageKelheimNoiseDashboard(foldersSeeded, noRuns)).context("noise")); + sw.generate(Path.of(inputPath), true); + sw.run(Path.of(inputPath)); + + return 0; + } + + /** + * A helper method to copy an already existing Geojson network rather than creating it all over again. 
+ */ + String copyGeoJsonNetwork(List dirs) { + + for (String dir : dirs) { + File networkFile = new File(dir + "/analysis/network/network.geojson"); + Path target = Path.of(Path.of(dir).getParent() + "/analysis/network"); + + if (Files.notExists(target) && networkFile.exists() && networkFile.isFile()) { + try { + Files.createDirectories(target); + Files.copy(networkFile.toPath(), Path.of(target + "/network.geojson")); + } catch (IOException e) { + throw new UncheckedIOException(e); + } + } + } + return "analysis/network/network.geojson"; + } +} diff --git a/src/main/java/org/matsim/analysis/KelheimDashboardProvider.java b/src/main/java/org/matsim/dashboard/KelheimDashboardProvider.java similarity index 87% rename from src/main/java/org/matsim/analysis/KelheimDashboardProvider.java rename to src/main/java/org/matsim/dashboard/KelheimDashboardProvider.java index 90382d2..19a478d 100644 --- a/src/main/java/org/matsim/analysis/KelheimDashboardProvider.java +++ b/src/main/java/org/matsim/dashboard/KelheimDashboardProvider.java @@ -1,4 +1,4 @@ -package org.matsim.analysis; +package org.matsim.dashboard; import org.matsim.analysis.emissions.KelheimEmissionsDashboard; import org.matsim.core.config.Config; @@ -6,6 +6,7 @@ import org.matsim.simwrapper.Dashboard; import org.matsim.simwrapper.DashboardProvider; import org.matsim.simwrapper.SimWrapper; +import org.matsim.simwrapper.dashboard.NoiseDashboard; import org.matsim.simwrapper.dashboard.TravelTimeComparisonDashboard; import org.matsim.simwrapper.dashboard.TripDashboard; @@ -26,7 +27,8 @@ public List getDashboards(Config config, SimWrapper simWrapper) { return List.of( trips, new TravelTimeComparisonDashboard(IOUtils.resolveFileOrResource( "kelheim-v3.0-routes-ref.csv.gz").toString()), - new KelheimEmissionsDashboard() + new KelheimEmissionsDashboard(), + new NoiseDashboard() ); } diff --git a/src/main/java/org/matsim/drtFare/KelheimDrtFareHandler.java b/src/main/java/org/matsim/drtFare/KelheimDrtFareHandler.java index 
0074f75..d689c1d 100644 --- a/src/main/java/org/matsim/drtFare/KelheimDrtFareHandler.java +++ b/src/main/java/org/matsim/drtFare/KelheimDrtFareHandler.java @@ -18,7 +18,7 @@ import org.matsim.contrib.dvrp.passenger.PassengerRequestRejectedEventHandler; import org.matsim.core.api.experimental.events.EventsManager; import org.matsim.core.utils.geometry.geotools.MGC; -import org.matsim.core.utils.gis.ShapeFileReader; +import org.matsim.core.utils.gis.GeoFileReader; import org.opengis.feature.simple.SimpleFeature; import java.net.MalformedURLException; @@ -45,6 +45,7 @@ public class KelheimDrtFareHandler implements DrtRequestSubmittedEventHandler, P private final Network network; private final Map zonalSystem; + //the boolean determines whether we need to surcharge, which is the case for trips starting and ending in zone 1. private final Map, Boolean> surchargeMap = new HashMap<>(); public KelheimDrtFareHandler(String mode, Network network, KelheimDrtFareParams params) { @@ -79,6 +80,7 @@ public void handleEvent(DrtRequestSubmittedEvent drtRequestSubmittedEvent) { if (drtRequestSubmittedEvent.getMode().equals(mode)) { Link fromLink = network.getLinks().get(drtRequestSubmittedEvent.getFromLinkId()); Link toLink = network.getLinks().get(drtRequestSubmittedEvent.getToLinkId()); +// log.warn("######### Passenger submitted {}, firstPerson = {}, lastPerson={}, event = {}", drtRequestSubmittedEvent.getRequestId(), drtRequestSubmittedEvent.getPersonIds().getFirst(), drtRequestSubmittedEvent.getPersonIds().getLast(), drtRequestSubmittedEvent); if (!zonalSystem.isEmpty()) { if (zonalSystem.get("1") == null) { throw new RuntimeException("The shape file data entry is not prepared correctly. " + @@ -105,6 +107,7 @@ public void handleEvent(DrtRequestSubmittedEvent drtRequestSubmittedEvent) { public void handleEvent(PassengerDroppedOffEvent event) { if (event.getMode().equals(mode)) { double actualFare = baseFare; +// log.warn("######### Passenger dropped off. 
request = {}, person = {}, event = {}", event.getRequestId(), event.getPersonId(), event); boolean doesSurchargeApply = surchargeMap.get(event.getRequestId()); if (doesSurchargeApply) { actualFare = actualFare + zone2Surcharge; @@ -112,12 +115,17 @@ public void handleEvent(PassengerDroppedOffEvent event) { events.processEvent( new PersonMoneyEvent(event.getTime(), event.getPersonId(), -actualFare, DrtFareHandler.PERSON_MONEY_EVENT_PURPOSE_DRT_FARE, mode, event.getRequestId().toString())); - surchargeMap.remove(event.getRequestId()); + + /*there are potentially multiple PassengerDroppedOffEvents per request (bc of groups), which is why we can't remove the request from the map here + in Kelheim scenarios, we mostly don't have large demand, which is why i don't care so much about the growing map. In other scenarios, one should maybe think about cleaning up + tschlenther, june '24*/ +// surchargeMap.remove(event.getRequestId()); } } @Override public void handleEvent(PassengerRequestRejectedEvent passengerRequestRejectedEvent) { +// log.warn("######### Passenger rejected {}, firstPerson = {}, lastPerson = {}, event = {}", passengerRequestRejectedEvent.getRequestId(), passengerRequestRejectedEvent.getPersonIds().getFirst(), passengerRequestRejectedEvent.getPersonIds().getLast(), passengerRequestRejectedEvent); if (passengerRequestRejectedEvent.getMode().equals(mode)) { surchargeMap.remove(passengerRequestRejectedEvent.getRequestId()); } @@ -139,9 +147,9 @@ private Collection getFeatures(String pathToShapeFile) { } catch (MalformedURLException e) { log.error(e); } - features = ShapeFileReader.getAllFeatures(shapeFileAsURL); + features = GeoFileReader.getAllFeatures(shapeFileAsURL); } else { - features = ShapeFileReader.getAllFeatures(pathToShapeFile); + features = GeoFileReader.getAllFeatures(pathToShapeFile); } return features; } else { diff --git a/src/main/java/org/matsim/run/RunKelheimScenario.java b/src/main/java/org/matsim/run/RunKelheimScenario.java index 
3c2d454..7c89df3 100644 --- a/src/main/java/org/matsim/run/RunKelheimScenario.java +++ b/src/main/java/org/matsim/run/RunKelheimScenario.java @@ -182,7 +182,7 @@ protected Config prepareConfig(Config config) { sw.defaultParams().shp = "../shp/dilutionArea.shp"; sw.defaultParams().mapCenter = "11.89,48.91"; sw.defaultParams().mapZoomLevel = 11d; - sw.defaultParams().sampleSize = sample.getSample(); + sw.sampleSize = sample.getSample(); if (intermodal) { ConfigUtils.addOrGetModule(config, PtIntermodalRoutingModesConfigGroup.class); @@ -232,6 +232,9 @@ protected Config prepareConfig(Config config) { // y = ax + b --> b value, for long trips distanceBasedPtFareParams.setLongDistanceTripIntercept(30); + //enable plan inheritance analysis + config.planInheritance().setEnabled(true); + if (iterations != -1) addRunOption(config, "iter", iterations); diff --git a/src/main/java/org/matsim/run/prepare/DrtStopsWriter.java b/src/main/java/org/matsim/run/prepare/DrtStopsWriter.java index 71ac293..d69d3f8 100644 --- a/src/main/java/org/matsim/run/prepare/DrtStopsWriter.java +++ b/src/main/java/org/matsim/run/prepare/DrtStopsWriter.java @@ -102,8 +102,9 @@ private void writeTransitStopsAndVizFiles(Network network) throws IOException { "KEXI_Haltestellen_Liste_Kelheim_utm32n_withLinkIds.csv"); Set> allLinks = new HashSet<>(); - try (CSVParser parser = new CSVParser(IOUtils.getBufferedReader(data), - CSVFormat.DEFAULT.withDelimiter(';').withFirstRecordAsHeader())) { + CSVFormat.Builder format = CSVFormat.DEFAULT.builder().setDelimiter(';').setHeader().setSkipHeaderRecord(true); + + try (CSVParser parser = new CSVParser(IOUtils.getBufferedReader(data), format.build())) { for (CSVRecord row : parser) { Coord coord = new Coord(Double.parseDouble(row.get("x")), Double.parseDouble(row.get("y"))); if (serviceArea == null || MGC.coord2Point(coord).within(serviceArea)) { diff --git a/src/main/java/org/matsim/run/prepare/PrepareRealDrtDemand.java 
b/src/main/java/org/matsim/run/prepare/PrepareRealDrtDemand.java index 010606d..22b39e4 100644 --- a/src/main/java/org/matsim/run/prepare/PrepareRealDrtDemand.java +++ b/src/main/java/org/matsim/run/prepare/PrepareRealDrtDemand.java @@ -43,6 +43,8 @@ public class PrepareRealDrtDemand implements MATSimAppCommand { @CommandLine.Mixin private CrsOptions crs = new CrsOptions(); + private final CSVFormat.Builder format = CSVFormat.DEFAULT.builder().setDelimiter(',').setHeader().setSkipHeaderRecord(true); + public static void main(String[] args) throws IOException { new PrepareRealDrtDemand().execute(args); } @@ -57,8 +59,7 @@ public Integer call() throws Exception { // Map stationCoordMap = loadStationCoordinates(); - try (CSVParser parser = new CSVParser(Files.newBufferedReader(Path.of(demands)), - CSVFormat.DEFAULT.withDelimiter(',').withFirstRecordAsHeader())) { + try (CSVParser parser = new CSVParser(Files.newBufferedReader(Path.of(demands)), format.build())) { int counter = 0; for (CSVRecord row : parser) { double fromX = Double.parseDouble(row.get("from_x")); @@ -99,8 +100,7 @@ public Integer call() throws Exception { private Map loadStationCoordinates() throws IOException { Map stationCoordMap = new HashMap<>(); - try (CSVParser parser = new CSVParser(Files.newBufferedReader(Path.of(drtStops)), - CSVFormat.DEFAULT.withDelimiter(',').withFirstRecordAsHeader())) { + try (CSVParser parser = new CSVParser(Files.newBufferedReader(Path.of(drtStops)), format.build())) { for (CSVRecord row : parser) { String stationName = row.get(0); double x = Double.parseDouble(row.get(2)); diff --git a/src/main/resources/META-INF/services/org.matsim.simwrapper.DashboardProvider b/src/main/resources/META-INF/services/org.matsim.simwrapper.DashboardProvider index 939a5ed..5a3da0f 100644 --- a/src/main/resources/META-INF/services/org.matsim.simwrapper.DashboardProvider +++ b/src/main/resources/META-INF/services/org.matsim.simwrapper.DashboardProvider @@ -1 +1 @@ 
-org.matsim.analysis.KelheimDashboardProvider \ No newline at end of file +org.matsim.dashboard.KelheimDashboardProvider \ No newline at end of file diff --git a/src/test/java/org/matsim/run/RunKelheimIntegrationTest.java b/src/test/java/org/matsim/run/RunKelheimIntegrationTest.java index 60bee28..f7584e8 100644 --- a/src/test/java/org/matsim/run/RunKelheimIntegrationTest.java +++ b/src/test/java/org/matsim/run/RunKelheimIntegrationTest.java @@ -1,7 +1,6 @@ package org.matsim.run; -import org.junit.Rule; -import org.junit.Test; +import org.junit.jupiter.api.Test; import org.matsim.application.MATSimApplication; import org.matsim.core.config.Config; import org.matsim.core.config.ConfigUtils; @@ -9,9 +8,10 @@ import org.matsim.simwrapper.SimWrapperConfigGroup; import org.matsim.testcases.MatsimTestUtils; +/** + * integration test. + */ public class RunKelheimIntegrationTest { - @Rule - public MatsimTestUtils utils = new MatsimTestUtils(); @Test public final void runExamplePopulationTest() {