average noise dashboard

simei94 committed May 23, 2024
1 parent a06e7d8 commit 72d10d7
Showing 8 changed files with 408 additions and 33 deletions.
input/test.config.xml (2 changes: 1 addition & 1 deletion)
@@ -78,7 +78,7 @@
 	<parameterset type="AnnealingVariable" >
 		<param name="annealParameter" value="globalInnovationRate" />
 		<param name="annealType" value="sigmoid" />
-		<param name="defaultSubpopulation" value="person" />
+		<param name="subpopulation" value="person" />
 		<!-- this parameter enters the exponential and sigmoid formulas. May be an iteration or a share, i.e. 0.5 for halfLife at 50% of iterations. Exponential: startValue / exp(it/halfLife) -->
 		<param name="halfLife" value="0.5" />
 		<!-- sigmoid: 1/(1+e^(shapeFactor*(it - halfLife))); geometric: startValue * shapeFactor^it; msa: startValue / it^shapeFactor -->
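
The annealing formulas quoted in these comments are easy to sanity-check numerically. Below is a standalone sketch of the exponential and sigmoid schedules exactly as written above; startValue, shapeFactor, and the iteration count are made-up illustration values, and halfLife is interpreted as a share of total iterations, as the comment describes:

import static java.lang.Math.exp;

class AnnealingSketch {
	public static void main(String[] args) {
		double startValue = 0.6;            // hypothetical starting globalInnovationRate
		int iterations = 100;               // hypothetical run length
		double halfLife = 0.5 * iterations; // halfLife given as share 0.5 of iterations
		double shapeFactor = 0.1;           // hypothetical

		for (int it = 0; it <= iterations; it += 25) {
			// Exponential: startValue / exp(it/halfLife)
			double exponential = startValue / exp(it / halfLife);
			// sigmoid: 1/(1+e^(shapeFactor*(it - halfLife)))
			double sigmoid = 1 / (1 + exp(shapeFactor * (it - halfLife)));
			System.out.printf("it=%3d  exponential=%.3f  sigmoid=%.3f%n", it, exponential, sigmoid);
		}
	}
}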
pom.xml (36 changes: 31 additions & 5 deletions)
@@ -88,11 +88,30 @@
 		</exclusions>
 	</dependency>

+<!-- <dependency>-->
+<!-- &lt;!&ndash; Include the JUnit testing library. Not transitive. &ndash;&gt;-->
+<!-- <groupId>junit</groupId>-->
+<!-- <artifactId>junit</artifactId>-->
+<!-- <version>4.13.2</version>-->
+<!-- <scope>test</scope>-->
+<!-- </dependency>-->
+
+	<!-- Test dependencies -->
 	<dependency>
-		<!-- Include the JUnit testing library. Not transitive. -->
-		<groupId>junit</groupId>
-		<artifactId>junit</artifactId>
-		<version>4.13.2</version>
+		<groupId>org.junit.jupiter</groupId>
+		<artifactId>junit-jupiter-engine</artifactId>
 		<scope>test</scope>
 	</dependency>
+	<dependency>
+		<groupId>org.junit.jupiter</groupId>
+		<artifactId>junit-jupiter</artifactId>
+		<scope>test</scope>
+	</dependency>
+
+	<dependency>
+		<groupId>org.assertj</groupId>
+		<artifactId>assertj-core</artifactId>
+		<version>3.24.2</version>
+		<scope>test</scope>
+	</dependency>
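
This switches the project from JUnit 4 to JUnit 5 (junit-jupiter aggregates the API, junit-jupiter-engine is the runtime that executes the tests) and adds AssertJ for fluent assertions. A minimal sketch of what a test could now look like; the test class and the checked values are hypothetical, not part of this commit:

package org.matsim.analysis.postAnalysis;

import org.junit.jupiter.api.Test;

import static org.assertj.core.api.Assertions.assertThat;

class NoiseAverageAnalysisTest {

	@Test
	void meanOfTwoSeeds() {
		// hypothetical check: averaging one emission value over two run seeds
		double mean = (50.0 + 54.0) / 2;
		assertThat(mean).isEqualTo(52.0);
	}
}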

@@ -190,6 +209,13 @@
 		<version>2.16.1</version>
 	</dependency>

+	<dependency>
+		<groupId>org.apache.avro</groupId>
+		<artifactId>avro</artifactId>
+		<version>1.11.3</version>
+	</dependency>
+
+
 </dependencies>
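
The new Avro dependency backs the immission rasters handled by the analysis class added below. As a minimal standalone sketch of the generic-record round trip it provides (the inline schema and file name are invented for illustration; the analysis class reads real XYTData files instead):

package org.matsim.analysis.postAnalysis;

import org.apache.avro.Schema;
import org.apache.avro.file.DataFileReader;
import org.apache.avro.file.DataFileWriter;
import org.apache.avro.generic.GenericData;
import org.apache.avro.generic.GenericDatumReader;
import org.apache.avro.generic.GenericDatumWriter;
import org.apache.avro.generic.GenericRecord;

import java.io.File;
import java.io.IOException;

class AvroRoundTripSketch {
	public static void main(String[] args) throws IOException {
		// made-up two-field record schema
		Schema schema = new Schema.Parser().parse(
			"{\"type\":\"record\",\"name\":\"Cell\",\"fields\":["
				+ "{\"name\":\"x\",\"type\":\"float\"},{\"name\":\"noise\",\"type\":\"float\"}]}");

		GenericRecord cell = new GenericData.Record(schema);
		cell.put("x", 4595.5f);
		cell.put("noise", 52.1f);

		File file = new File("cells.avro"); // made-up path
		try (DataFileWriter<GenericRecord> writer = new DataFileWriter<>(new GenericDatumWriter<>(schema))) {
			writer.create(schema, file);
			writer.append(cell);
		}
		try (DataFileReader<GenericRecord> reader = new DataFileReader<>(file, new GenericDatumReader<>())) {
			for (GenericRecord record : reader) {
				System.out.println(record.get("noise"));
			}
		}
	}
}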

<build>
@@ -208,7 +234,7 @@
 				<forkCount>1</forkCount>
 				<reuseForks>false</reuseForks>
 				<!-- avoid out of memory errors: -->
-				<argLine>@{argLine} -Xmx6500m -Djava.awt.headless=true -Dmatsim.preferLocalDtds=true</argLine>
+				<argLine>@{argLine} -Djava.awt.headless=true -Dmatsim.preferLocalDtds=true</argLine>
 				<!--necessary in tu berlin gitlab. BUT not good in other places, so solve by command line switch only where needed. kai, nov'18-->
 				<!--<useSystemClassLoader>false</useSystemClassLoader>-->
 			</configuration>
@@ -0,0 +1,246 @@
package org.matsim.analysis.postAnalysis;

import org.apache.avro.file.CodecFactory;
import org.apache.avro.file.DataFileReader;
import org.apache.avro.file.DataFileWriter;
import org.apache.avro.file.FileReader;
import org.apache.avro.generic.GenericData;
import org.apache.avro.generic.GenericDatumReader;
import org.apache.avro.generic.GenericRecord;
import org.apache.avro.io.DatumWriter;
import org.apache.avro.specific.SpecificDatumWriter;
import org.apache.avro.util.Utf8;
import org.apache.commons.csv.CSVFormat;
import org.apache.commons.csv.CSVPrinter;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.matsim.application.CommandSpec;
import org.matsim.application.MATSimAppCommand;
import org.matsim.application.avro.XYTData;
import org.matsim.application.options.CsvOptions;
import org.matsim.application.options.InputOptions;
import org.matsim.application.options.OutputOptions;
import org.matsim.core.utils.io.IOUtils;
import picocli.CommandLine;
import tech.tablesaw.api.ColumnType;
import tech.tablesaw.api.Row;
import tech.tablesaw.api.Table;
import tech.tablesaw.io.csv.CsvReadOptions;

import java.io.File;
import java.io.IOException;
import java.io.UncheckedIOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.*;

import static org.matsim.application.ApplicationUtils.globFile;

@CommandLine.Command(name = "average-noise", description = "Calculates average noise stats based on several sim runs with different random seeds.")
@CommandSpec(
	requires = {"runs"},
	produces = {"mean_emission_per_day.csv", "mean_immission_per_day.avro", "mean_immission_per_hour.avro"}
)
public class NoiseAverageAnalysis implements MATSimAppCommand {
	private final Logger log = LogManager.getLogger(NoiseAverageAnalysis.class);
	@CommandLine.Mixin
	private InputOptions input = InputOptions.ofCommand(NoiseAverageAnalysis.class);
	@CommandLine.Mixin
	private OutputOptions output = OutputOptions.ofCommand(NoiseAverageAnalysis.class);
	@CommandLine.Option(names = "--no-runs", defaultValue = "5", description = "Number of simulation runs to be averaged.")
	private Integer noRuns;

	private final CsvOptions csv = new CsvOptions();
	private static final String ANALYSIS_DIR = "/analysis/noise";
	private static final String LINK_ID = "Link Id";
	private static final String VALUE = "value";
	private List<GenericRecord> imissionsPerDay = new ArrayList<>();
	private List<GenericRecord> imissionsPerHour = new ArrayList<>();
	private Map<String, List<Double>> emissionsPerDay = new HashMap<>();
	private Map<String, Double> meanEmissionsPerDay = new HashMap<>();


	public static void main(String[] args) {
		new NoiseAverageAnalysis().execute(args);
	}

	@Override
	public Integer call() throws Exception {
		String runs = input.getPath("runs");

		List<String> foldersSeeded = Arrays.stream(runs.split(",")).toList();

		// add stats from every run to map
		for (String folder : foldersSeeded) {
			final Path analysisDir = Path.of(folder + ANALYSIS_DIR);
			String emissionsCsv = globFile(analysisDir, "*emission_per_day.csv*").toString();
			String imissionsPerDayAvro = globFile(analysisDir, "*immission_per_day.avro*").toString();
			String imissionsPerHourAvro = globFile(analysisDir, "*immission_per_hour.avro*").toString();

			// read emissions csv
			Table emissions = Table.read().csv(CsvReadOptions.builder(IOUtils.getBufferedReader(emissionsCsv))
				.columnTypesPartial(Map.of(LINK_ID, ColumnType.STRING, VALUE, ColumnType.DOUBLE))
				.sample(false)
				.separator(csv.detectDelimiter(emissionsCsv)).build());

			// read avro files
			readAvroFile(imissionsPerDayAvro, imissionsPerDay);
			readAvroFile(imissionsPerHourAvro, imissionsPerHour);

			// collect all per-link emission values of this run
			for (int i = 0; i < emissions.rowCount(); i++) {
				Row row = emissions.row(i);

				emissionsPerDay.computeIfAbsent(row.getString(LINK_ID), k -> new ArrayList<>()).add(row.getDouble(VALUE));
			}
		}

		// calc emission means and write to mean map
		for (Map.Entry<String, List<Double>> e : emissionsPerDay.entrySet()) {
			double sum = e.getValue().stream().mapToDouble(Double::doubleValue).sum();
			meanEmissionsPerDay.put(e.getKey(), sum / e.getValue().size());
		}

		// calc avro means
		XYTData imissionsPerDayMean = calcAvroMeans(imissionsPerDay);
		XYTData imissionsPerHourMean = calcAvroMeans(imissionsPerHour);

		// write emission mean stats
		try (CSVPrinter printer = new CSVPrinter(Files.newBufferedWriter(output.getPath("mean_emission_per_day.csv")), CSVFormat.DEFAULT)) {
			printer.printRecord(LINK_ID, VALUE);

			for (Map.Entry<String, Double> e : meanEmissionsPerDay.entrySet()) {
				printer.printRecord(e.getKey(), e.getValue());
			}
		}

		// write avro mean files
		writeAvro(imissionsPerDayMean, new File(output.getPath("mean_immission_per_day.avro").toString()));
		writeAvro(imissionsPerHourMean, new File(output.getPath("mean_immission_per_hour.avro").toString()));

		return 0;
	}

	private void writeAvro(XYTData xytData, File outputFile) {
		DatumWriter<XYTData> datumWriter = new SpecificDatumWriter<>(XYTData.class);
		try (DataFileWriter<XYTData> dataFileWriter = new DataFileWriter<>(datumWriter)) {
			// deflate level 9 = strongest compression
			dataFileWriter.setCodec(CodecFactory.deflateCodec(9));
			dataFileWriter.create(xytData.getSchema(), IOUtils.getOutputStream(IOUtils.getFileUrl(outputFile.toString()), false));
			dataFileWriter.append(xytData);
		} catch (IOException e) {
			throw new UncheckedIOException(e);
		}
	}

	/**
	 * Sums the XYT values of all run seeds element-wise and divides by the number of records
	 * (1 record = 1 run seed). All seeds are expected to share the same coordinate grid and time stamps.
	 */
	private XYTData calcAvroMeans(List<GenericRecord> recordList) {
		String crs = null;
		List<Float> xCoords = new ArrayList<>();
		List<Float> yCoords = new ArrayList<>();
		List<Integer> timeStamps = new ArrayList<>();
		Map<CharSequence, List<Float>> data = new HashMap<>();

		for (GenericRecord genericRecord : recordList) {
			// field order of the record: crs, xCoords, yCoords, timeStamps, data map
			String object0 = genericRecord.get(0).toString();
			Object object1 = genericRecord.get(1);
			Object object2 = genericRecord.get(2);
			Object object3 = genericRecord.get(3);
			Object object4 = genericRecord.get(4);

			if (crs == null) {
				crs = object0;
			}

			getCoordData(object1, xCoords);
			getCoordData(object2, yCoords);

			// TODO: for the example data there was only one time stamp even in the hourly data. This might be different with real data and needs to be checked.
			if (object3 instanceof GenericData.Array<?>) {
				List<Integer> ints = new ArrayList<>((GenericData.Array<Integer>) object3);

				if (!timeStamps.equals(ints)) {
					if (timeStamps.isEmpty()) {
						timeStamps.addAll(ints);
					} else {
						log.error("Lists of time stamps from the different run seeds are not identical, this should not happen. Abort.");
						throw new IllegalArgumentException();
					}
				}
			}

			// there should be only one key in the map
			if (object4 instanceof HashMap<?, ?>) {
				for (Map.Entry<?, ?> entry : ((HashMap<?, ?>) object4).entrySet()) {
					if (entry.getKey() instanceof Utf8 && entry.getKey().toString().equals("imissions") && entry.getValue() instanceof GenericData.Array<?>) {
						List<Float> values = new ArrayList<>((GenericData.Array<Float>) entry.getValue());
						String entryString = entry.getKey().toString();

						List<Float> sums = data.get(entryString);
						if (sums == null) {
							// first record: store its values as the initial sums
							data.put(entryString, values);
						} else {
							// later records: add their values to the running element-wise sums
							for (int i = 0; i < sums.size(); i++) {
								sums.set(i, sums.get(i) + values.get(i));
							}
						}
					}
				}
			}
		}

		// calc mean values for each data point out of sums and number of records (1 record = 1 run seed)
		data.entrySet()
			.stream()
			.filter(entry -> entry.getKey().equals("imissions"))
			.forEach(entry -> {
				List<Float> sums = entry.getValue();
				for (int i = 0; i < sums.size(); i++) {
					sums.set(i, sums.get(i) / recordList.size());
				}
			});

		return new XYTData(crs, xCoords, yCoords, timeStamps, data);
	}

	private void getCoordData(Object object, List<Float> target) {
		if (object instanceof GenericData.Array<?>) {
			List<Float> floats = new ArrayList<>((GenericData.Array<Float>) object);

			if (!target.equals(floats)) {
				if (target.isEmpty()) {
					target.addAll(floats);
				} else {
					log.error("Lists of coords from the different run seeds are not identical, this should not happen. Abort.");
					throw new IllegalArgumentException();
				}
			}
		}
	}

	private void readAvroFile(String input, List<GenericRecord> target) {
		// read the schema and all records from the avro file and save the records to the target list
		try (FileReader<GenericRecord> fileReader = DataFileReader.openReader(new File(input), new GenericDatumReader<>())) {
			log.info("Reading .avro file from {} with schema {}.", input, fileReader.getSchema());

			while (fileReader.hasNext()) {
				target.add(fileReader.next());
			}
		} catch (IOException e) {
			log.error(e);
		}
	}
}
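
For intuition, the averaging in calcAvroMeans reduces to an element-wise mean over the per-seed value arrays. A self-contained sketch with made-up numbers (two seeds, two receiver points):

import java.util.ArrayList;
import java.util.List;

class ElementwiseMeanSketch {
	public static void main(String[] args) {
		// one value list per run seed; all seeds share the same grid order
		List<List<Float>> perSeed = List.of(
			List.of(50.0f, 62.5f),  // seed 1
			List.of(54.0f, 60.5f)); // seed 2

		List<Float> means = new ArrayList<>();
		for (int i = 0; i < perSeed.get(0).size(); i++) {
			float sum = 0f;
			for (List<Float> seed : perSeed) {
				sum += seed.get(i);
			}
			means.add(sum / perSeed.size());
		}
		System.out.println(means); // prints [52.0, 61.5]
	}
}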
@@ -68,24 +68,6 @@ private String postProcess(Data data, String outputFile) {
return data.compute(EmissionsPostProcessingAverageAnalysis.class, outputFile, args.toArray(new String[0]));
}

-	private String copyGeoJsonNetwork() {
-
-		for (String dir : dirs) {
-			File networkFile = new File(dir + "/analysis/network/network.geojson");
-			Path target = Path.of(Path.of(dir).getParent() + "/analysis/network");
-
-			if (Files.notExists(target) && networkFile.exists() && networkFile.isFile()) {
-				try {
-					Files.createDirectories(target);
-					Files.copy(networkFile.toPath(), Path.of(target + "/network.geojson"));
-				} catch (IOException e) {
-					throw new UncheckedIOException(e);
-				}
-			}
-		}
-		return "analysis/network/network.geojson";
-	}

/**
* Produces the dashboard.
*/
@@ -114,7 +96,7 @@ public void configure(Header header, Layout layout) {
 		viz.height = 12.0;
 		viz.datasets.csvFile = postProcess(data, "mean_emissions_per_link_per_m.csv");
 		viz.datasets.csvBase = Path.of(this.dirs.get(0)).getParent().relativize(Path.of(pathToCsvBase)).toString();
-		viz.network = copyGeoJsonNetwork();
+		viz.network = new CreateAverageDashboards().copyGeoJsonNetwork(dirs);
 		viz.display.color.columnName = "CO2_TOTAL [g/m]";
 		viz.display.color.dataset = "csvFile";
 		viz.display.width.scaleFactor = 100;