Skip to content

Commit

Permalink
Integrates Timeslices, change time-series to PU (#32)
Browse files Browse the repository at this point in the history
Changes and Fixes:

- Integrated timeslices with new timeslice and timeslice_value fields
from plexosdb query and associated parsing logic
- Renames variables to be more intuitive. Now variables and fields for
max_active_power/active_power/rating match the Sienna definitions of
these terms instead of plexos definition.
- Changes export of max_active_power to be per-unit
- Fixes read-order of date_from / date_to filtering, which previously
caused all retiring units to be excluded from the system
- Refactors `self._get_value()` 
- Fix #25 
- Fix #31
  • Loading branch information
ktehranchi authored Sep 12, 2024
1 parent eec9cb6 commit fb21275
Show file tree
Hide file tree
Showing 6 changed files with 254 additions and 185 deletions.
26 changes: 13 additions & 13 deletions src/r2x/defaults/config.json
Original file line number Diff line number Diff line change
Expand Up @@ -26,9 +26,9 @@
},
"default_num_units": 1,
"default_reserve_types": [
"Spinning",
"Flexibility",
"Regulation"
"SPINNING",
"FLEXIBILITY",
"REGULATION"
],
"device_inference_string": {},
"distribution_losses": 1,
Expand Down Expand Up @@ -260,24 +260,24 @@
},
"reserve_types": {
"1": {
"direction": "Up",
"type": "Spinning"
"direction": "UP",
"type": "SPINNING"
},
"2": {
"direction": "Down",
"type": "Spinning"
"direction": "DOWN",
"type": "SPINNING"
},
"3": {
"direction": "Up",
"type": "Regulation"
"direction": "UP",
"type": "REGULATION"
},
"4": {
"direction": "Down",
"type": "Regulation"
"direction": "DOWN",
"type": "REGULATION"
},
"default": {
"direction": "Up",
"type": "Spinning"
"direction": "UP",
"type": "SPINNING"
}
},
"reserve_vors": {
Expand Down
4 changes: 2 additions & 2 deletions src/r2x/defaults/plexos_input.json
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,7 @@
"Load Risk": "load_risk",
"Loss Incr": "losses",
"Maintenance Rate": "planned_outage_rate",
"Max Capacity": "active_power",
"Max Capacity": "rating",
"Max Flow": "max_power_flow",
"Max Ramp Down": "ramp_down",
"Max Ramp Up": "ramp_up",
Expand All @@ -34,7 +34,7 @@
"Production Rate": "rate",
"Pump Efficiency": "pump_efficiency",
"Pump Load": "pump_load",
"Rating": "rating",
"Rating": "max_active_power",
"Reactance": "reactance",
"Resistance": "resistance",
"Start Cost": "startup_cost",
Expand Down
6 changes: 5 additions & 1 deletion src/r2x/exporter/handler.py
Original file line number Diff line number Diff line change
Expand Up @@ -145,10 +145,14 @@ def export_data_files(self, time_series_folder: str = "Data") -> None:
config_dict["component_type"] = component_type
csv_fname = string_template.safe_substitute(config_dict)
csv_table = np.column_stack([date_time_column, *time_series_arrays])
header = '"DateTime",' + ",".join(
[f'"{name}"' for name in self.time_series_name_by_type[component_type]]
)

np.savetxt(
csv_fpath / csv_fname,
csv_table,
header="DateTime," + ",".join(self.time_series_name_by_type[component_type]),
header=header,
delimiter=",",
comments="",
fmt="%s",
Expand Down
24 changes: 1 addition & 23 deletions src/r2x/exporter/sienna.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,8 +9,6 @@
from loguru import logger

# Local imports
from infrasys.time_series_models import SingleTimeSeries
from pint import Quantity
from r2x.exporter.handler import BaseExporter
from r2x.models import (
ACBranch,
Expand Down Expand Up @@ -228,6 +226,7 @@ def process_gen_data(self, fname="gen.csv"):
"prime_mover_type",
"bus_id",
"fuel",
"base_mva",
"rating",
"unit_type",
"active_power",
Expand Down Expand Up @@ -436,27 +435,6 @@ def create_timeseries_pointers(self) -> None:
logger.info("File timeseries_pointers.json created.")
return

def create_extra_data_json(self) -> None:
    """Write an ``extra_data.json`` file to the output folder.

    For every component type registered on the system, collects each
    component's ``ext`` entries (skipping ``SingleTimeSeries`` values and
    converting ``Quantity`` values to tuples so they are JSON-serializable)
    and dumps the result as a list of ``{type_name: {component_name: {...}}}``
    records.
    """
    serialized: list[dict] = []
    for component_type in self.system.get_component_types():
        type_name = component_type.__name__
        per_component: dict = {}
        for component in self.system.get_components(component_type):
            # Quantity values are not JSON-serializable; to_tuple() makes
            # them round-trippable. Time series are exported elsewhere.
            per_component[component.name] = {
                key: (value.to_tuple() if isinstance(value, Quantity) else value)
                for key, value in component.ext.items()
                if not isinstance(value, SingleTimeSeries)
            }
        serialized.append({type_name: per_component})

    with open(os.path.join(self.output_folder, "extra_data.json"), mode="w") as f:
        json.dump(serialized, f)

    logger.info("File extra_data.json created.")
    return

def export_data(self) -> None:
"""Export csv data to specified folder from output_data attribute."""
logger.debug("Saving Sienna data and timeseries files.")
Expand Down
22 changes: 22 additions & 0 deletions src/r2x/parser/parser_helpers.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@
from loguru import logger
import polars as pl
import pandas as pd
from datetime import datetime


def pl_filter_year(df, year: int | None = None, year_columns=["t", "year"], **kwargs):
Expand All @@ -22,6 +23,27 @@ def pl_filter_year(df, year: int | None = None, year_columns=["t", "year"], **kw
return df.filter(pl.col(matching_names[0]) == year)


def filter_property_dates(system_data: pl.DataFrame, study_year: int):
    """Keep rows whose [date_from, date_to] window covers the study year.

    A row survives when its ``date_from`` is null or on/before Jan 1 of
    ``study_year`` AND its ``date_to`` is null or on/after that date.

    NOTE: only Jan 1 of the study year is checked; partial-year validity is
    not handled yet. Also assumes date_from/date_to arrive as ISO-8601
    strings ("%Y-%m-%dT%H:%M:%S") — TODO confirm against the plexosdb query.
    """
    cutoff = datetime(study_year, 1, 1)

    # Lazy expressions — evaluated only when .filter() runs below, i.e.
    # after the string columns have been parsed into dates.
    starts_before_cutoff = pl.col("date_from").is_null() | (pl.col("date_from") <= cutoff)
    ends_after_cutoff = pl.col("date_to").is_null() | (pl.col("date_to") >= cutoff)

    # Parse the ISO-8601 timestamp strings, then drop the time component.
    # NOTE(review): the Date columns are then compared against a datetime
    # cutoff — relies on polars' implicit temporal casting; verify.
    parsed = system_data.with_columns(
        [
            pl.col("date_from").str.strptime(pl.Datetime, "%Y-%m-%dT%H:%M:%S").cast(pl.Date),
            pl.col("date_to").str.strptime(pl.Datetime, "%Y-%m-%dT%H:%M:%S").cast(pl.Date),
        ]
    )
    return parsed.filter(starts_before_cutoff & ends_after_cutoff)


def pl_lowercase(df: pl.DataFrame, **kwargs):
logger.trace("Lowercase columns: {}", df.collect_schema().names())
result = df.with_columns(pl.col(pl.String).str.to_lowercase()).rename(
Expand Down
Loading

0 comments on commit fb21275

Please sign in to comment.