From 9ff34b8018cb71572197d056db4bc3555448f330 Mon Sep 17 00:00:00 2001
From: MeenBna
Date: Wed, 14 Aug 2024 09:38:51 +0200
Subject: [PATCH] Updated requirements.txt to resolve ModuleNotFoundError in
 GitHub Actions workflow.

---
 ...ploring_API_Functions_Authentication.ipynb | 29 +++++-----
 requirements.txt                              |  1 +
 src/pyprediktormapclient/opc_ua.py            | 55 +++++++++++++------
 3 files changed, 55 insertions(+), 30 deletions(-)

diff --git a/notebooks/Exploring_API_Functions_Authentication.ipynb b/notebooks/Exploring_API_Functions_Authentication.ipynb
index 4825d6c..b7bf855 100644
--- a/notebooks/Exploring_API_Functions_Authentication.ipynb
+++ b/notebooks/Exploring_API_Functions_Authentication.ipynb
@@ -17,7 +17,10 @@
     "import datetime\n",
     "import os\n",
     "from dotenv import load_dotenv \n",
-    "from pathlib import Path"
+    "from pathlib import Path\n",
+    "import nest_asyncio\n",
+    "\n",
+    "nest_asyncio.apply()"
    ]
   },
   {
@@ -269,13 +272,15 @@
    "metadata": {},
    "outputs": [],
    "source": [
-    "# 1 day raw historical data\n",
-    "one_day_raw_historical_data = opc_data.get_raw_historical_values(\n",
-    "    start_time = datetime.datetime(2024, 7, 13, 00, 00),\n",
-    "    end_time = datetime.datetime(2024, 7, 13, 23, 59),\n",
+    "# 1 day aggregated historical data\n",
+    "one_day_historical_data = opc_data.get_historical_aggregated_values(\n",
+    "    start_time=(datetime.datetime.now() - datetime.timedelta(30)),\n",
+    "    end_time=(datetime.datetime.now() - datetime.timedelta(29)),\n",
+    "    pro_interval=60*1000,\n",
+    "    agg_name=\"Average\",\n",
     "    variable_list=string_sets_for_first_park.variables_as_list([\"DCPower\"])\n",
     ")\n",
-    "one_day_raw_historical_data"
+    "one_day_historical_data"
    ]
   },
   {
@@ -284,15 +289,13 @@
    "metadata": {},
    "outputs": [],
    "source": [
-    "# 1 day aggregated historical data\n",
-    "one_day_historical_data = opc_data.get_historical_aggregated_values(\n",
-    "    start_time=(datetime.datetime.now() - datetime.timedelta(30)),\n",
-    "    end_time=(datetime.datetime.now() - datetime.timedelta(29)),\n",
-    "    pro_interval=60*1000,\n",
-    "    agg_name=\"Average\",\n",
+    "# 1 day raw historical data\n",
+    "one_day_raw_historical_data = opc_data.get_raw_historical_values(\n",
+    "    start_time = datetime.datetime(2024, 7, 13, 00, 00),\n",
+    "    end_time = datetime.datetime(2024, 7, 13, 23, 59),\n",
     "    variable_list=string_sets_for_first_park.variables_as_list([\"DCPower\"])\n",
     ")\n",
-    "one_day_historical_data"
+    "one_day_raw_historical_data"
    ]
   },
   {
diff --git a/requirements.txt b/requirements.txt
index 35385f1..1a91227 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,3 +1,4 @@
+nest_asyncio
 sphinx_rtd_theme
 tox
 ipykernel
diff --git a/src/pyprediktormapclient/opc_ua.py b/src/pyprediktormapclient/opc_ua.py
index 9a582e8..12c9420 100644
--- a/src/pyprediktormapclient/opc_ua.py
+++ b/src/pyprediktormapclient/opc_ua.py
@@ -129,7 +129,8 @@ class WriteReturn(BaseModel):
 class AsyncIONotebookHelper:
     @staticmethod
     def run_coroutine(coroutine):
-        return asyncio.get_event_loop().run_until_complete(coroutine)
+        loop = asyncio.get_event_loop()
+        return loop.run_until_complete(coroutine)
 
 class Config:
     arbitrary_types_allowed = True
@@ -347,22 +348,40 @@ def _process_df(self, df_result: pd.DataFrame, columns: Dict[str, str]) -> pd.Da
     async def _make_request(self, endpoint: str, body: dict, max_retries: int, retry_delay: int):
         for attempt in range(max_retries):
             try:
+                logging.info(f"Attempt {attempt + 1} of {max_retries}")
                 async with ClientSession() as session:
-                    async with session.post(
-                        f"{self.rest_url}{endpoint}",
-                        json=body,
-                        headers=self.headers
-                    ) as response:
-                        response.raise_for_status()
-                        return await response.json()
+                    url = f"{self.rest_url}{endpoint}"
+                    logging.info(f"Making POST request to {url}")
+                    logging.debug(f"Request body: {body}")
+                    logging.debug(f"Request headers: {self.headers}")
+
+                    async with session.post(url, json=body, headers=self.headers) as response:
+                        logging.info(f"Response received: Status {response.status}")
+
+                        if response.status >= 400:
+                            error_text = await response.text()
+                            logging.error(f"HTTP error {response.status}: {error_text}")
+                            response.raise_for_status()
+
+                        return await response.json()
+
+            except aiohttp.ClientResponseError as e:
+                if e.status == 500:
+                    logging.error(f"Server Error: {e}")
+                    raise  # For 500 errors, we might want to fail fast
+                logging.error(f"ClientResponseError: {e}")
             except aiohttp.ClientError as e:
-                if attempt < max_retries - 1:
-                    wait_time = retry_delay * (2 ** attempt)
-                    logger.warning(f"Request failed. Retrying in {wait_time} seconds...")
-                    await asyncio.sleep(wait_time)
-                else:
-                    logger.error(f"Max retries reached. Error: {e}")
-                    raise RuntimeError(f'Error message {e}')
+                logging.error(f"ClientError in POST request: {e}")
+            except Exception as e:
+                logging.error(f"Unexpected error in _make_request: {e}")
+
+            if attempt < max_retries - 1:
+                wait_time = retry_delay * (2 ** attempt)
+                logging.warning(f"Request failed. Retrying in {wait_time} seconds...")
+                await asyncio.sleep(wait_time)
+            else:
+                logging.error("Max retries reached.")
+                raise RuntimeError('Max retries reached')
 
     def _process_content(self, content: dict) -> pd.DataFrame:
         self._check_content(content)
@@ -467,8 +486,9 @@ async def get_raw_historical_values_asyn(
         return self._process_df(combined_df, columns)
 
     def get_raw_historical_values(self, *args, **kwargs):
-        return self.helper.run_coroutine(self.get_raw_historical_values_asyn(*args, **kwargs))
-
+        result = self.helper.run_coroutine(self.get_raw_historical_values_asyn(*args, **kwargs))
+        return result
+
     async def get_historical_aggregated_values_asyn(
         self,
@@ -508,7 +528,8 @@ async def get_historical_aggregated_values_asyn(
         return self._process_df(combined_df, columns)
 
     def get_historical_aggregated_values(self, *args, **kwargs):
-        return self.helper.run_coroutine(self.get_historical_aggregated_values_asyn(*args, **kwargs))
+        result = self.helper.run_coroutine(self.get_historical_aggregated_values_asyn(*args, **kwargs))
+        return result
 
     def write_values(self, variable_list: List[WriteVariables]) -> List:
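
Note on the notebook change: Jupyter already runs an asyncio event loop, so `AsyncIONotebookHelper.run_coroutine`'s call to `loop.run_until_complete(...)` would raise `RuntimeError: This event loop is already running` when invoked from a cell. `nest_asyncio.apply()` patches the running loop to allow such nested blocking calls. A minimal sketch of the pattern; `fetch_data` is a made-up stand-in, not part of the library:

```python
import asyncio

import nest_asyncio

# Patch the current event loop so run_until_complete() can be called
# even while a loop is already running (as it is inside Jupyter).
nest_asyncio.apply()

async def fetch_data() -> dict:
    await asyncio.sleep(0.1)  # stand-in for real async I/O
    return {"status": "ok"}

# Without nest_asyncio, this line fails inside a notebook with
# "RuntimeError: This event loop is already running".
result = asyncio.get_event_loop().run_until_complete(fetch_data())
print(result)  # {'status': 'ok'}
```

This is also why `nest_asyncio` goes into requirements.txt: the notebook now imports it, so the GitHub Actions job that executes the notebook raises `ModuleNotFoundError` when the package is absent.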
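
For context, `AsyncIONotebookHelper` exists so that synchronous wrappers such as `get_raw_historical_values` and `get_historical_aggregated_values` can drive their async `*_asyn` counterparts. A stripped-down sketch of that sync-over-async bridge; `Client` and `get_values` are illustrative names, not the library's API:

```python
import asyncio

class AsyncIONotebookHelper:
    @staticmethod
    def run_coroutine(coroutine):
        # Blocks until the coroutine finishes; with nest_asyncio.apply()
        # in effect, this also works inside an already-running loop.
        loop = asyncio.get_event_loop()
        return loop.run_until_complete(coroutine)

class Client:
    helper = AsyncIONotebookHelper()

    async def get_values_asyn(self, n: int) -> list:
        await asyncio.sleep(0)  # stand-in for real async I/O
        return list(range(n))

    def get_values(self, *args, **kwargs):
        # Synchronous facade over the async implementation.
        result = self.helper.run_coroutine(self.get_values_asyn(*args, **kwargs))
        return result

print(Client().get_values(3))  # [0, 1, 2]
```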
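
The `_make_request` rewrite keeps the original exponential backoff (`retry_delay * 2 ** attempt`) while adding fail-fast behavior on HTTP 500 and retrying other failures. A self-contained sketch of that retry shape, assuming a reachable `url` and a JSON-serializable `body`:

```python
import asyncio
import logging

import aiohttp
from aiohttp import ClientSession

async def post_with_retry(url: str, body: dict,
                          max_retries: int = 3, retry_delay: int = 1) -> dict:
    for attempt in range(max_retries):
        try:
            async with ClientSession() as session:
                async with session.post(url, json=body) as response:
                    response.raise_for_status()
                    return await response.json()
        # ClientResponseError subclasses ClientError, so catch it first.
        except aiohttp.ClientResponseError as e:
            if e.status == 500:
                raise  # Server errors are not retried; fail fast.
            logging.error(f"ClientResponseError: {e}")
        except aiohttp.ClientError as e:
            logging.error(f"ClientError: {e}")
        if attempt < max_retries - 1:
            wait_time = retry_delay * (2 ** attempt)  # 1s, 2s, 4s, ...
            logging.warning(f"Request failed. Retrying in {wait_time} seconds...")
            await asyncio.sleep(wait_time)
    raise RuntimeError("Max retries reached")
```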