Commit
[pre-commit.ci] auto fixes from pre-commit.com hooks
for more information, see https://pre-commit.ci
pre-commit-ci[bot] committed Oct 30, 2023
1 parent 295d78f commit 60ad2ef
Showing 3 changed files with 30 additions and 73 deletions.
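
The changes below are mechanical formatting fixes: multi-line calls are joined onto single lines of up to roughly 100 characters (consistent with a black-style formatter), and the imports in scripts/fetch_pv_timeseries.py are regrouped and sorted (consistent with isort). The hook set itself is defined by the repository's .pre-commit-config.yaml, which this commit does not touch.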
1 change: 1 addition & 0 deletions pvoutput/mapscraper.py
@@ -350,6 +350,7 @@ def clean_soup(soup):
     """Function to clean scraped soup object.
     Note that the downloaded soup could change over time.
     Args:
         soup: bs4.BeautifulSoup
91 changes: 23 additions & 68 deletions pvoutput/pvoutput.py
@@ -161,9 +161,7 @@ def search(
         if lat is not None and lon is not None:
             api_params["ll"] = "{:f},{:f}".format(lat, lon)
 
-        pv_systems_text = self._api_query(
-            service="search", api_params=api_params, **kwargs
-        )
+        pv_systems_text = self._api_query(service="search", api_params=api_params, **kwargs)
 
         pv_systems = pd.read_csv(
             StringIO(pv_systems_text),
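
For context, search() sends a query to the PVOutput.org search service and parses the CSV reply into a DataFrame. A minimal usage sketch; the PVOutput constructor arguments and the positional query argument are assumptions not shown in this hunk:

    from pvoutput import PVOutput

    # Hypothetical credentials; register with pvoutput.org to obtain real ones.
    pv = PVOutput(api_key="YOUR_API_KEY", system_id="YOUR_SYSTEM_ID")

    # lat/lon are sent as the "ll" parameter, formatted with
    # "{:f},{:f}".format(lat, lon) exactly as in the hunk above.
    pv_systems = pv.search("25km", lat=52.0, lon=-1.25)
    print(pv_systems.head())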
@@ -316,9 +314,7 @@ def get_system_status(
             temperature_C,
             voltage,
         """
-        _LOG.info(
-            f"system_ids {pv_system_ids}: Requesting batch system status for %s", date
-        )
+        _LOG.info(f"system_ids {pv_system_ids}: Requesting batch system status for %s", date)
         date = date_to_pvoutput_str(date)
         _check_date(date)
 
@@ -336,9 +332,7 @@
             )
 
         except NoStatusFound:
-            _LOG.info(
-                f"system_id {all_pv_system_id}: No status found for date %s", date
-            )
+            _LOG.info(f"system_id {all_pv_system_id}: No status found for date %s", date)
             pv_system_status_text = "no status found"
 
         # each pv system is on a new line
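
Both joined _LOG.info() calls mix an eagerly interpolated f-string with a lazy %s placeholder that the logging machinery fills in when the record is emitted. The pattern is valid, if inconsistent; a standalone sketch:

    import logging

    logging.basicConfig(level=logging.INFO)
    _LOG = logging.getLogger("pvoutput")

    pv_system_ids, date = [123, 456], "2023-10-30"
    # The f-string renders immediately; %s is substituted by logging later.
    _LOG.info(f"system_ids {pv_system_ids}: Requesting batch system status for %s", date)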
@@ -443,8 +437,7 @@ def get_batch_status(
                 time.sleep(1)
             else:
                 _print_and_log(
-                    "Call get_batch_status again in a minute to see if"
-                    " results are ready."
+                    "Call get_batch_status again in a minute to see if" " results are ready."
                 )
         else:
             break
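
The joined line still contains two adjacent string literals because formatters in the black family do not merge literals. Python concatenates them at compile time, so the message comes out whole:

    msg = "Call get_batch_status again in a minute to see if" " results are ready."
    assert msg == "Call get_batch_status again in a minute to see if results are ready."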
@@ -571,12 +564,8 @@ def get_metadata_for_country(
             **kwargs,
         )
 
-        _LOG.debug(
-            f"getting metadata for {country_code} for {start_id_range} to {end_id_range}"
-        )
-        print(
-            f"getting metadata for {country_code} for {start_id_range} to {end_id_range}"
-        )
+        _LOG.debug(f"getting metadata for {country_code} for {start_id_range} to {end_id_range}")
+        print(f"getting metadata for {country_code} for {start_id_range} to {end_id_range}")
 
         pv_metadata_for_country = pd.read_csv(
             StringIO(pv_metadata_text),
@@ -688,12 +677,8 @@ def get_statistic(
         else:
             pv_metadata.index = [pv_system_id]
 
-        pv_metadata["query_date_from"] = (
-            pd.Timestamp(date_from) if date_from else pd.NaT
-        )
-        pv_metadata["query_date_to"] = (
-            pd.Timestamp(date_to) if date_to else pd.Timestamp.now()
-        )
+        pv_metadata["query_date_from"] = pd.Timestamp(date_from) if date_from else pd.NaT
+        pv_metadata["query_date_to"] = pd.Timestamp(date_to) if date_to else pd.Timestamp.now()
         return pv_metadata
 
     def _get_statistic_with_cache(
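
The two conditional expressions give the query window open-ended defaults: a missing date_from becomes NaT and a missing date_to becomes the current time. The same defaults in a standalone sketch:

    import pandas as pd

    date_from, date_to = None, None
    query_date_from = pd.Timestamp(date_from) if date_from else pd.NaT
    query_date_to = pd.Timestamp(date_to) if date_to else pd.Timestamp.now()
    print(query_date_from)  # NaT
    print(query_date_to)    # the current wall-clock time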
@@ -740,9 +725,7 @@ def _get_fresh_statistic():
             return stats
 
         try:
-            stats = pd.read_hdf(
-                store_filename, key="statistics", where="index=pv_system_id"
-            )
+            stats = pd.read_hdf(store_filename, key="statistics", where="index=pv_system_id")
         except (FileNotFoundError, KeyError):
             return _get_fresh_statistic()
 
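The where clause pushes the row filter down into the HDF5 store instead of loading the whole statistics table; pandas resolves pv_system_id from the calling scope. A standalone sketch (stats.h5 is a made-up filename; requires the optional PyTables dependency):

    import pandas as pd

    df = pd.DataFrame(
        {"num_outputs": [100, 200]},
        index=pd.Index([123, 456], name="pv_system_id"),
    )
    with pd.HDFStore("stats.h5", mode="w", complevel=9) as store:
        # append() writes the queryable "table" format, unlike put()'s default.
        store.append(key="statistics", value=df, data_columns=True)

    pv_system_id = 456  # looked up by name inside the where expression
    stats = pd.read_hdf("stats.h5", key="statistics", where="index=pv_system_id")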
@@ -808,9 +791,7 @@ def download_multiple_systems_to_disk(
         n = len(system_ids)
         for i, pv_system_id in enumerate(system_ids):
             _LOG.info("**********************")
-            msg = "system_id {:d}: {:d} of {:d} ({:%})".format(
-                pv_system_id, i + 1, n, (i + 1) / n
-            )
+            msg = "system_id {:d}: {:d} of {:d} ({:%})".format(pv_system_id, i + 1, n, (i + 1) / n)
             _LOG.info(msg)
             print("\r", msg, end="", flush=True)
 
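The {:%} spec in the joined format string multiplies its argument by 100 and appends a percent sign, with six decimal places by default:

    msg = "system_id {:d}: {:d} of {:d} ({:%})".format(123, 5, 10, 5 / 10)
    print(msg)  # system_id 123: 5 of 10 (50.000000%)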
@@ -944,13 +925,9 @@ def _filter_date_range(
             _LOG.info("system_id %d: Stats say there is no data!", system_id)
             return []
 
-        timeseries_date_range = DateRange(
-            stats["actual_date_from"], stats["actual_date_to"]
-        )
+        timeseries_date_range = DateRange(stats["actual_date_from"], stats["actual_date_to"])
 
-        data_availability = stats["num_outputs"] / (
-            timeseries_date_range.total_days() + 1
-        )
+        data_availability = stats["num_outputs"] / (timeseries_date_range.total_days() + 1)
 
         if data_availability < min_data_availability:
             _LOG.info(
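
The + 1 in the joined denominator makes the day count inclusive of both endpoints. A worked instance with hypothetical numbers:

    import pandas as pd

    num_outputs = 292
    # A full-year range: 364 whole days between the endpoints, so "+ 1"
    # counts both endpoint days.
    total_days = (pd.Timestamp("2022-12-31") - pd.Timestamp("2022-01-01")).days  # 364
    data_availability = num_outputs / (total_days + 1)  # 292 / 365 == 0.8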
@@ -1091,9 +1068,7 @@ def _api_query(
             RateLimitExceeded
         """
         get_response_func = (
-            self._get_data_service_response
-            if use_data_service
-            else self._get_api_response
+            self._get_data_service_response if use_data_service else self._get_api_response
         )
 
         try:
@@ -1105,16 +1080,13 @@
         try:
             return self._process_api_response(response)
         except RateLimitExceeded:
-            msg = (
-                "PVOutput.org API rate limit exceeded!"
-                " Rate limit will be reset at {}".format(self.rate_limit_reset_time)
+            msg = "PVOutput.org API rate limit exceeded!" " Rate limit will be reset at {}".format(
+                self.rate_limit_reset_time
             )
             _print_and_log(msg)
             if wait_if_rate_limit_exceeded:
                 self.wait_for_rate_limit_reset()
-                return self._api_query(
-                    service, api_params, wait_if_rate_limit_exceeded=False
-                )
+                return self._api_query(service, api_params, wait_if_rate_limit_exceeded=False)
 
             raise RateLimitExceeded(response, msg)
 
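The recursion above retries exactly once: the inner call passes wait_if_rate_limit_exceeded=False, so a second RateLimitExceeded propagates to the caller. The control flow in miniature (a standalone sketch, not the library's API):

    class RateLimitExceeded(Exception):
        pass

    def api_query_sketch(call, wait_for_reset, wait_if_rate_limit_exceeded=True):
        try:
            return call()
        except RateLimitExceeded:
            if wait_if_rate_limit_exceeded:
                wait_for_reset()
                # Retry once; the flag is now False, so a repeat failure raises.
                return api_query_sketch(call, wait_for_reset, wait_if_rate_limit_exceeded=False)
            raise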
@@ -1138,9 +1110,7 @@ def _get_api_response(self, service: str, api_params: Dict) -> requests.Response
 
         return _get_response(api_url, api_params, headers)
 
-    def _get_data_service_response(
-        self, service: str, api_params: Dict
-    ) -> requests.Response:
+    def _get_data_service_response(self, service: str, api_params: Dict) -> requests.Response:
         """
         Get the data service response from pvoutput.org
 
@@ -1172,9 +1142,7 @@ def _set_rate_limit_params(self, headers):
             header_value = int(headers[header_key])
             setattr(self, param_name, header_value)
 
-        self.rate_limit_reset_time = pd.Timestamp.utcfromtimestamp(
-            self.rate_limit_reset_time
-        )
+        self.rate_limit_reset_time = pd.Timestamp.utcfromtimestamp(self.rate_limit_reset_time)
         self.rate_limit_reset_time = self.rate_limit_reset_time.tz_convert("utc")
 
         _LOG.debug("%s", self.rate_limit_info())
@@ -1248,9 +1216,7 @@ def wait_for_rate_limit_reset(self, do_sleeping: bool = True) -> int:
         # retry_time_local = retry_time_utc.tz_convert(tz=datetime.now(tzlocal()).tzname())
         retry_time_local = retry_time_utc
         _print_and_log(
-            "Waiting {:.0f} seconds. Will retry at {} UTC".format(
-                secs_to_wait, retry_time_local
-            )
+            "Waiting {:.0f} seconds. Will retry at {} UTC".format(secs_to_wait, retry_time_local)
         )
         if do_sleeping:
             time.sleep(secs_to_wait)
@@ -1339,25 +1305,14 @@ def _append_missing_date_range(
         missing_end_date,
     )
     with pd.HDFStore(output_filename, mode="a", complevel=9) as store:
-        store.append(
-            key="missing_dates", value=new_missing_date_range, data_columns=True
-        )
+        store.append(key="missing_dates", value=new_missing_date_range, data_columns=True)
 
 
-def _record_gaps(
-    output_filename, pv_system_id, date_to, timeseries, datetime_of_api_request
-):
+def _record_gaps(output_filename, pv_system_id, date_to, timeseries, datetime_of_api_request):
     dates_of_data = (
-        timeseries["instantaneous_power_gen_W"]
-        .dropna()
-        .resample("D")
-        .mean()
-        .dropna()
-        .index.date
+        timeseries["instantaneous_power_gen_W"].dropna().resample("D").mean().dropna().index.date
     )
-    dates_requested = pd.date_range(
-        date_to - timedelta(days=365), date_to, freq="D"
-    ).date
+    dates_requested = pd.date_range(date_to - timedelta(days=365), date_to, freq="D").date
     missing_dates = set(dates_requested) - set(dates_of_data)
     missing_date_ranges = _convert_consecutive_dates_to_date_ranges(list(missing_dates))
     _LOG.info(
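
The collapsed method chain in _record_gaps reduces a sub-hourly power timeseries to the set of dates that contain any data: the first dropna() discards empty readings, resample("D").mean() buckets by day, and the second dropna() removes days whose bucket was empty; subtracting that set from the requested year yields the missing dates. A standalone sketch with synthetic data:

    import numpy as np
    import pandas as pd

    idx = pd.date_range("2023-10-01", periods=4 * 96, freq="15min")  # four days
    power = pd.Series(1.0, index=idx, name="instantaneous_power_gen_W")
    power.loc["2023-10-02"] = np.nan  # knock out one whole day

    dates_of_data = power.dropna().resample("D").mean().dropna().index.date
    # -> [2023-10-01, 2023-10-03, 2023-10-04]; 2023-10-02 is a gap.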
11 changes: 6 additions & 5 deletions scripts/fetch_pv_timeseries.py
@@ -21,14 +21,15 @@
 or create and use a ~/.pvoutput.yml file as described in the PVOutput library documentation
 """
 
-from pvoutput import *
-
-import click as cl
 import datetime as dt
+import logging
+import pathlib
 import sys
 
+import click as cl
 import pandas as pd
-import pathlib
-import logging
+
+from pvoutput import *
 
+
 @cl.command()
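
The import shuffle follows the standard three-group convention enforced by import sorters such as isort: standard library first, third-party packages next, the local package last, each group alphabetised and separated by a blank line:

    import datetime as dt  # standard library
    import logging
    import pathlib
    import sys

    import click as cl  # third-party
    import pandas as pd

    from pvoutput import *  # local package (the star import is kept as-is)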
