Updated requirements.txt to resolve ModuleNotFoundError in GitHub Actions workflow.
MeenaBana committed Aug 14, 2024
1 parent 72cd4c1 commit 9ff34b8
Showing 3 changed files with 55 additions and 30 deletions.
29 changes: 16 additions & 13 deletions notebooks/Exploring_API_Functions_Authentication.ipynb
@@ -17,7 +17,10 @@
"import datetime\n",
"import os\n",
"from dotenv import load_dotenv \n",
"from pathlib import Path"
"from pathlib import Path\n",
"import nest_asyncio\n",
"\n",
"nest_asyncio.apply()"
]
},
{
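Note on the hunk above: Jupyter runs its own asyncio event loop, so a synchronous wrapper that calls loop.run_until_complete() would otherwise fail with "RuntimeError: This event loop is already running"; nest_asyncio.apply() patches the running loop to permit such re-entrant calls. A minimal sketch of the failure mode and the fix, where fetch_value is a hypothetical coroutine standing in for the client's async API:

import asyncio
import nest_asyncio

nest_asyncio.apply()  # patch the already-running notebook loop to allow re-entry

async def fetch_value() -> int:
    await asyncio.sleep(0.1)  # stand-in for an awaited HTTP round-trip
    return 42

# Inside a notebook cell this line would raise RuntimeError without the
# apply() call above; in a plain script (no running loop) it works either way.
print(asyncio.get_event_loop().run_until_complete(fetch_value()))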
@@ -269,13 +272,15 @@
"metadata": {},
"outputs": [],
"source": [
"# 1 day raw historical data\n",
"one_day_raw_historical_data = opc_data.get_raw_historical_values(\n",
" start_time = datetime.datetime(2024, 7, 13, 00, 00),\n",
" end_time = datetime.datetime(2024, 7, 13, 23, 59),\n",
"# 1 day aggregated historical data\n",
"one_day_historical_data = opc_data.get_historical_aggregated_values(\n",
" start_time=(datetime.datetime.now() - datetime.timedelta(30)),\n",
" end_time=(datetime.datetime.now() - datetime.timedelta(29)),\n",
" pro_interval=60*1000,\n",
" agg_name=\"Average\",\n",
" variable_list=string_sets_for_first_park.variables_as_list([\"DCPower\"])\n",
")\n",
"one_day_raw_historical_data"
"one_day_historical_data"
]
},
{
@@ -284,15 +289,13 @@
"metadata": {},
"outputs": [],
"source": [
"# 1 day aggregated historical data\n",
"one_day_historical_data = opc_data.get_historical_aggregated_values(\n",
" start_time=(datetime.datetime.now() - datetime.timedelta(30)),\n",
" end_time=(datetime.datetime.now() - datetime.timedelta(29)),\n",
" pro_interval=60*1000,\n",
" agg_name=\"Average\",\n",
"# 1 day raw historical data\n",
"one_day_raw_historical_data = opc_data.get_raw_historical_values(\n",
" start_time = datetime.datetime(2024, 7, 13, 00, 00),\n",
" end_time = datetime.datetime(2024, 7, 13, 23, 59),\n",
" variable_list=string_sets_for_first_park.variables_as_list([\"DCPower\"])\n",
")\n",
"one_day_historical_data"
"one_day_raw_historical_data"
]
},
{
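For readability, the two reordered cells above with their JSON escaping removed; the commit only swaps their order and leaves the calls themselves unchanged (opc_data and string_sets_for_first_park are created earlier in the notebook):

import datetime

# 1 day aggregated historical data, averaged over a one-day window
# starting 30 days ago (pro_interval of 60*1000 is presumably milliseconds)
one_day_historical_data = opc_data.get_historical_aggregated_values(
    start_time=(datetime.datetime.now() - datetime.timedelta(30)),
    end_time=(datetime.datetime.now() - datetime.timedelta(29)),
    pro_interval=60 * 1000,
    agg_name="Average",
    variable_list=string_sets_for_first_park.variables_as_list(["DCPower"]),
)

# 1 day raw historical data for a fixed calendar day
one_day_raw_historical_data = opc_data.get_raw_historical_values(
    start_time=datetime.datetime(2024, 7, 13, 0, 0),
    end_time=datetime.datetime(2024, 7, 13, 23, 59),
    variable_list=string_sets_for_first_park.variables_as_list(["DCPower"]),
)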
1 change: 1 addition & 0 deletions requirements.txt
@@ -1,3 +1,4 @@
+nest_asyncio
 sphinx_rtd_theme
 tox
 ipykernel
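This one-line addition is the fix the commit message names: a fresh GitHub Actions runner presumably installs dependencies only from requirements.txt, so the notebook's new import nest_asyncio raised ModuleNotFoundError before this commit. A hypothetical reproduction with a friendlier guard:

try:
    import nest_asyncio
except ModuleNotFoundError as e:
    # Before this commit, CI failed here because nest_asyncio was never installed.
    raise SystemExit(
        "nest_asyncio is not installed; add it to requirements.txt "
        "or run `pip install nest_asyncio`"
    ) from e

nest_asyncio.apply()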
55 changes: 38 additions & 17 deletions src/pyprediktormapclient/opc_ua.py
@@ -129,7 +129,8 @@ class WriteReturn(BaseModel):
 class AsyncIONotebookHelper:
     @staticmethod
     def run_coroutine(coroutine):
-        return asyncio.get_event_loop().run_until_complete(coroutine)
+        loop = asyncio.get_event_loop()
+        return loop.run_until_complete(coroutine)

 class Config:
     arbitrary_types_allowed = True
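The run_coroutine change is cosmetic: the event loop is bound to a local name before run_until_complete, with no behavioral difference. For context, a self-contained sketch of how this helper bridges the async client methods to synchronous callers (the double coroutine is hypothetical):

import asyncio
import nest_asyncio

nest_asyncio.apply()  # only needed where a loop is already running, e.g. notebooks

class AsyncIONotebookHelper:
    @staticmethod
    def run_coroutine(coroutine):
        loop = asyncio.get_event_loop()
        return loop.run_until_complete(coroutine)

async def double(x: int) -> int:
    await asyncio.sleep(0)  # hypothetical awaited work
    return 2 * x

helper = AsyncIONotebookHelper()
print(helper.run_coroutine(double(21)))  # prints 42

Worth knowing: asyncio.get_event_loop() emits a DeprecationWarning on newer Python versions when no loop is running; in a notebook a loop is always running, which is precisely the case nest_asyncio exists to handle.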
@@ -347,22 +348,40 @@ def _process_df(self, df_result: pd.DataFrame, columns: Dict[str, str]) -> pd.DataFrame:
     async def _make_request(self, endpoint: str, body: dict, max_retries: int, retry_delay: int):
         for attempt in range(max_retries):
             try:
                 logging.info(f"Attempt {attempt + 1} of {max_retries}")
                 async with ClientSession() as session:
-                    async with session.post(
-                        f"{self.rest_url}{endpoint}",
-                        json=body,
-                        headers=self.headers
-                    ) as response:
-                        response.raise_for_status()
+                    url = f"{self.rest_url}{endpoint}"
+                    logging.info(f"Making POST request to {url}")
+                    logging.debug(f"Request body: {body}")
+                    logging.debug(f"Request headers: {self.headers}")
+
+                    async with session.post(url, json=body, headers=self.headers) as response:
+                        logging.info(f"Response received: Status {response.status}")
+
+                        if response.status >= 400:
+                            error_text = await response.text()
+                            logging.error(f"HTTP error {response.status}: {error_text}")
+                            response.raise_for_status()
+
                         return await response.json()

             except aiohttp.ClientResponseError as e:
-                if e.status == 500:
-                    logging.error(f"Server Error: {e}")
-                    raise  # For 500 errors, we might want to fail fast
+                logging.error(f"ClientResponseError: {e}")
             except aiohttp.ClientError as e:
-                if attempt < max_retries - 1:
-                    wait_time = retry_delay * (2 ** attempt)
-                    logger.warning(f"Request failed. Retrying in {wait_time} seconds...")
-                    await asyncio.sleep(wait_time)
-                else:
-                    logger.error(f"Max retries reached. Error: {e}")
-                    raise RuntimeError(f'Error message {e}')
+                logging.error(f"ClientError in POST request: {e}")
+            except Exception as e:
+                logging.error(f"Unexpected error in _make_request: {e}")
+
+            if attempt < max_retries - 1:
+                wait_time = retry_delay * (2 ** attempt)
+                logging.warning(f"Request failed. Retrying in {wait_time} seconds...")
+                await asyncio.sleep(wait_time)
+            else:
+                logging.error(f"Max retries reached.")
+                raise RuntimeError('Max retries reached')

     def _process_content(self, content: dict) -> pd.DataFrame:
         self._check_content(content)
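The rewrite changes retry semantics as well as logging: the old version retried only aiohttp.ClientError and failed fast on HTTP 500, while the new version logs every exception type and then falls through to one shared backoff block, sleeping retry_delay * 2**attempt seconds between attempts (1 s, 2 s, 4 s, ... for retry_delay=1) and raising RuntimeError once max_retries is exhausted; as rendered here, even ClientResponseError (HTTP 4xx/5xx) now appears to be retried rather than failing fast. A standalone sketch of that schedule with the HTTP call stubbed out (flaky_request is hypothetical):

import asyncio
import logging
import random

async def flaky_request() -> dict:
    # stub standing in for the aiohttp POST in _make_request
    if random.random() < 0.7:
        raise ConnectionError("simulated network failure")
    return {"ok": True}

async def request_with_backoff(max_retries: int = 4, retry_delay: int = 1) -> dict:
    for attempt in range(max_retries):
        try:
            return await flaky_request()  # success exits immediately
        except Exception as e:
            logging.error(f"Attempt {attempt + 1} failed: {e}")
        # mirror the commit: backoff runs after the except blocks
        if attempt < max_retries - 1:
            wait_time = retry_delay * (2 ** attempt)  # exponential backoff
            await asyncio.sleep(wait_time)
        else:
            raise RuntimeError("Max retries reached")

print(asyncio.run(request_with_backoff()))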
@@ -467,8 +486,9 @@ async def get_raw_historical_values_asyn(
         return self._process_df(combined_df, columns)

     def get_raw_historical_values(self, *args, **kwargs):
-        return self.helper.run_coroutine(self.get_raw_historical_values_asyn(*args, **kwargs))
-
+        result = self.helper.run_coroutine(self.get_raw_historical_values_asyn(*args, **kwargs))
+        return result
+

     async def get_historical_aggregated_values_asyn(
         self,
@@ -508,7 +528,8 @@ async def get_historical_aggregated_values_asyn(
         return self._process_df(combined_df, columns)

     def get_historical_aggregated_values(self, *args, **kwargs):
-        return self.helper.run_coroutine(self.get_historical_aggregated_values_asyn(*args, **kwargs))
+        result = self.helper.run_coroutine(self.get_historical_aggregated_values_asyn(*args, **kwargs))
+        return result


     def write_values(self, variable_list: List[WriteVariables]) -> List:
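Both public wrappers now share the same two-line shape: run the _asyn coroutine through the helper, bind the result, return it. That is behaviorally identical to the previous one-liner but easier to break on in a debugger. A condensed sketch of this sync-over-async facade, with a hypothetical client class and node id (reusing the AsyncIONotebookHelper shown above):

import asyncio

class AsyncIONotebookHelper:
    @staticmethod
    def run_coroutine(coroutine):
        loop = asyncio.get_event_loop()
        return loop.run_until_complete(coroutine)

class ClientSketch:
    # stand-in for the client class in opc_ua.py: each public method is a
    # thin synchronous wrapper over an async implementation
    def __init__(self):
        self.helper = AsyncIONotebookHelper()

    async def get_values_asyn(self, node_id: str) -> dict:
        await asyncio.sleep(0)  # hypothetical awaited REST round-trip
        return {"node": node_id, "value": 1.0}

    def get_values(self, *args, **kwargs):
        result = self.helper.run_coroutine(self.get_values_asyn(*args, **kwargs))
        return result

print(ClientSketch().get_values("ns=2;s=SomePark.DCPower"))  # node id is made up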
