
Commit

Merge pull request #28 from PrediktorAS/chore/add_analytics
Added analytics helper
alydersen authored Sep 26, 2022
2 parents b7755d6 + 57ef8ef commit 2fd234f
Showing 7 changed files with 409 additions and 72 deletions.
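
In substance, every model index query in the two notebooks changes from asking the client for a DataFrame to taking the raw JSON response and wrapping it in the new AnalyticsHelper. A condensed before/after sketch of the pattern, assuming that model is the model index client the notebooks construct in cells collapsed out of this diff:

    # Before this commit: DataFrame conversion and id extraction via the client and shared helpers
    sites = model.get_objects_of_type("SiteType", return_format="dataframe")
    site_ids = get_ids_from_dataframe(sites)

    # After this commit: the raw JSON is wrapped in the new helper
    from pyprediktormapclient.analytics_helper import AnalyticsHelper

    sites = AnalyticsHelper(model.get_objects_of_type("SiteType"))
    site_ids = sites.list_of_ids()
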
5 changes: 5 additions & 0 deletions CHANGELOG.rst
@@ -2,6 +2,11 @@
Changelog
=========

version 0.1.0
===========

- Added an analytics helper

version 0.0.9
===========

54 changes: 31 additions & 23 deletions notebooks/Example_Data_Downloading.ipynb
@@ -4,7 +4,7 @@
"cell_type": "markdown",
"metadata": {},
"source": [
"This notebook contains examples of using model index and OPC UA functions to download inverters, strings set and trackers data of the site 'EG-AS' from the model index and OPC UA api servers. The aggregated historical data downloaded in the data folder and cell execution time can be noted for each request."
"This notebook contains examples of using model index and OPC UA functions to download inverters, strings set and trackers data of the first site from the model index and OPC UA api servers. The aggregated historical data downloaded in the data folder and cell execution time can be noted for each request."
]
},
{
@@ -62,6 +62,9 @@
"# Import OPC UA functions\n",
"from pyprediktormapclient.opc_ua import OPC_UA\n",
"\n",
"# Import Analytics Helper\n",
"from pyprediktormapclient.analytics_helper import AnalyticsHelper\n",
"\n",
"# Import \"Dataframer\" Tools\n",
"from pyprediktormapclient.shared import *"
]
@@ -115,8 +118,9 @@
"outputs": [],
"source": [
"# All the sites on the OPC server\n",
"sites = model.get_objects_of_type('SiteType', return_format=\"dataframe\")\n",
"sites"
"sites_json = model.get_objects_of_type('SiteType')\n",
"sites = AnalyticsHelper(sites_json)\n",
"sites.dataframe"
]
},
{
@@ -126,8 +130,9 @@
"outputs": [],
"source": [
"# Selecting the first site\n",
"site = sites.iloc[0]\n",
"site_ids = [site['Id']]"
"all_site_ids = sites.list_of_ids()\n",
"first_site_id = all_site_ids[0]\n",
"first_site_id"
]
},
{
@@ -144,8 +149,9 @@
"outputs": [],
"source": [
"# All the inverters on the site\n",
"inverters = model.get_object_descendants(\"InverterType\", site_ids, \"PV_Assets\", return_format=\"dataframe\")\n",
"inverters"
"inverters_json = model.get_object_descendants(\"InverterType\", all_site_ids, \"PV_Assets\")\n",
"inverters = AnalyticsHelper(inverters_json)\n",
"inverters.dataframe"
]
},
{
@@ -166,7 +172,7 @@
"outputs": [],
"source": [
"# Live value inverters data\n",
"inv_liv_values = tsdata.get_live_values_data(inv_var_list, inverters)\n",
"inv_liv_values = tsdata.get_live_values_data(inv_var_list, inverters.dataframe)\n",
"inv_liv_values"
]
},
@@ -177,7 +183,7 @@
"outputs": [],
"source": [
"# 1 day aggregated historical inverter data\n",
"await tsdata.get_agg_hist_value_data(start_time=start_time, end_time=end_time, pro_interval=pro_interval, agg_name=agg_name, obj_dataframe=inverters, include_variables=inv_var_list)"
"await tsdata.get_agg_hist_value_data(start_time=start_time, end_time=end_time, pro_interval=pro_interval, agg_name=agg_name, obj_dataframe=inverters.dataframe, include_variables=inv_var_list)"
]
},
{
@@ -187,7 +193,7 @@
"outputs": [],
"source": [
"# 15 day inverters data\n",
"await tsdata.get_agg_hist_value_data(start_time, end_time1, pro_interval, agg_name, inverters, inv_var_list)"
"await tsdata.get_agg_hist_value_data(start_time, end_time1, pro_interval, agg_name, inverters.dataframe, inv_var_list)"
]
},
{
@@ -197,7 +203,7 @@
"outputs": [],
"source": [
"# 1 month inverter data \n",
"await tsdata.get_agg_hist_value_data(start_time, end_time2, pro_interval, agg_name, inverters, inv_var_list)"
"await tsdata.get_agg_hist_value_data(start_time, end_time2, pro_interval, agg_name, inverters.dataframe, inv_var_list)"
]
},
{
@@ -214,8 +220,9 @@
"outputs": [],
"source": [
"# Strings set data \n",
"strings = model.get_object_descendants(\"StringSetType\", site_ids, \"PV_Assets\", return_format=\"dataframe\")\n",
"strings"
"strings_json = model.get_object_descendants(\"StringSetType\", all_site_ids, \"PV_Assets\")\n",
"strings = AnalyticsHelper(strings_json)\n",
"strings.dataframe"
]
},
{
@@ -235,7 +242,7 @@
"outputs": [],
"source": [
"# Live value strings data \n",
"strng_liv_values = tsdata.get_live_values_data(strng_var_list, strings)\n",
"strng_liv_values = tsdata.get_live_values_data(strng_var_list, strings.dataframe)\n",
"strng_liv_values"
]
},
@@ -246,7 +253,7 @@
"outputs": [],
"source": [
"# 1 day strings data\n",
"await tsdata.get_agg_hist_value_data(start_time, end_time, pro_interval, agg_name, strings, strng_var_list)"
"await tsdata.get_agg_hist_value_data(start_time, end_time, pro_interval, agg_name, strings.dataframe, strng_var_list)"
]
},
{
@@ -256,7 +263,7 @@
"outputs": [],
"source": [
"# 15 days strings data\n",
"await tsdata.get_agg_hist_value_data(start_time, end_time1, pro_interval, agg_name, strings, strng_var_list)"
"await tsdata.get_agg_hist_value_data(start_time, end_time1, pro_interval, agg_name, strings.dataframe, strng_var_list)"
]
},
{
@@ -266,7 +273,7 @@
"outputs": [],
"source": [
"# 1 month strings data\n",
"await tsdata.get_agg_hist_value_data(start_time, end_time2, pro_interval, agg_name, strings, strng_var_list)"
"await tsdata.get_agg_hist_value_data(start_time, end_time2, pro_interval, agg_name, strings.dataframe, strng_var_list)"
]
},
{
@@ -283,8 +290,9 @@
"outputs": [],
"source": [
"# Trackers data \n",
"trackers = model.get_object_ancestors(\"TrackerType\", get_ids_from_dataframe(strings), \"PV_Serves\", return_format=\"dataframe\")\n",
"trackers"
"trackers_json = model.get_object_ancestors(\"TrackerType\", strings.list_of_ids(), \"PV_Serves\")\n",
"trackers = AnalyticsHelper(trackers_json)\n",
"trackers.dataframe"
]
},
{
@@ -303,7 +311,7 @@
"outputs": [],
"source": [
"# Live value trackers data\n",
"track_liv_values = tsdata.get_live_values_data(track_var_list, trackers)\n",
"track_liv_values = tsdata.get_live_values_data(track_var_list, trackers.dataframe)\n",
"track_liv_values"
]
},
@@ -314,7 +322,7 @@
"outputs": [],
"source": [
"# 1 day trackers data\n",
"await tsdata.get_agg_hist_value_data(start_time, end_time, pro_interval, agg_name, trackers, track_var_list)"
"await tsdata.get_agg_hist_value_data(start_time, end_time, pro_interval, agg_name, trackers.dataframe, track_var_list)"
]
},
{
@@ -325,7 +333,7 @@
"source": [
"# 15 day trackers data \n",
"\n",
"await tsdata.get_agg_hist_value_data(start_time, end_time1, pro_interval, agg_name, trackers, track_var_list)"
"await tsdata.get_agg_hist_value_data(start_time, end_time1, pro_interval, agg_name, trackers.dataframe, track_var_list)"
]
},
{
@@ -335,7 +343,7 @@
"outputs": [],
"source": [
"# 1 month trackers data \n",
"await tsdata.get_agg_hist_value_data(start_time, end_time2, pro_interval, agg_name, trackers, track_var_list)"
"await tsdata.get_agg_hist_value_data(start_time, end_time2, pro_interval, agg_name, trackers.dataframe, track_var_list)"
]
}
],
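
In Example_Data_Downloading.ipynb the navigation calls stay the same apart from dropping the return_format argument; each JSON result is wrapped in an AnalyticsHelper, and it is the helper's dataframe attribute that gets passed on to the OPC UA time-series functions. A condensed sketch of that flow, assuming the tsdata client, the variable lists and the time-range and aggregation parameters are defined in the collapsed cells, and that the sites and strings helpers are built as shown above:

    inverters = AnalyticsHelper(
        model.get_object_descendants("InverterType", sites.list_of_ids(), "PV_Assets")
    )
    trackers = AnalyticsHelper(
        model.get_object_ancestors("TrackerType", strings.list_of_ids(), "PV_Serves")
    )

    # Live values and aggregated history now take the helper's dataframe
    inv_live_values = tsdata.get_live_values_data(inv_var_list, inverters.dataframe)
    inv_history = await tsdata.get_agg_hist_value_data(
        start_time=start_time,
        end_time=end_time,
        pro_interval=pro_interval,
        agg_name=agg_name,
        obj_dataframe=inverters.dataframe,
        include_variables=inv_var_list,
    )  # top-level await works because this runs inside a notebook cell
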
62 changes: 41 additions & 21 deletions notebooks/Exploring_API_Functions.ipynb
@@ -62,6 +62,9 @@
"# Import OPC UA functions\n",
"from pyprediktormapclient.opc_ua import OPC_UA\n",
"\n",
"# Import Analytics Helper\n",
"from pyprediktormapclient.analytics_helper import AnalyticsHelper\n",
"\n",
"# Import \"Dataframer\" Tools\n",
"from pyprediktormapclient.shared import *"
]
@@ -156,11 +159,21 @@
"outputs": [],
"source": [
"# To get the objects of a type\n",
"sitetypes = model.get_objects_of_type(\"SiteType\")\n",
"sites_json = model.get_objects_of_type(\"SiteType\")\n",
"\n",
"# Send the returned JSON into a normalizer to get Id, Type, Name, Props and Vars as columns\n",
"sitetypes_dataframe = normalize_as_dataframe(sitetypes)\n",
"sitetypes_dataframe"
"sites = AnalyticsHelper(sites_json)\n",
"sites.list_of_names()"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Analytics helper\n",
"sites.variables_as_dataframe()"
]
},
{
@@ -169,11 +182,19 @@
"metadata": {},
"outputs": [],
"source": [
"# Get the list of ids from\n",
"sitetype_ids = get_ids_from_dataframe(sitetypes_dataframe)\n",
"# Descendents of an object type\n",
"obj_descendents = model.get_object_descendants(\"StringSetType\", sitetype_ids, \"PV_Assets\", return_format=\"dataframe\")\n",
"obj_descendents"
"sites.list_of_ids()"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# String sets for all parks\n",
"string_sets_json = model.get_object_descendants(\"StringSetType\", sites.list_of_ids(), \"PV_Assets\")\n",
"string_sets = AnalyticsHelper(string_sets_json).dataframe\n",
"string_sets"
]
},
{
@@ -182,12 +203,8 @@
"metadata": {},
"outputs": [],
"source": [
"# All the sites on the OPC server\n",
"sites = model.get_objects_of_type('SiteType', return_format=\"dataframe\")\n",
"sites\n",
"# Selecting the first site\n",
"site = sites.iloc[0]\n",
"site_ids = [site['Id']]"
"first_site_id = sites.list_of_ids()[0]"
]
},
{
@@ -196,9 +213,10 @@
"metadata": {},
"outputs": [],
"source": [
"# Object descendants data of a specific site for that park\n",
"strings = model.get_object_descendants(\"StringSetType\", site_ids, \"PV_Assets\", return_format=\"dataframe\")\n",
"strings"
"# Get all stringsets for one park\n",
"string_sets_for_first_park_as_json = model.get_object_descendants(\"StringSetType\", [first_site_id], \"PV_Assets\")\n",
"string_sets_for_first_park = AnalyticsHelper(string_sets_for_first_park_as_json)\n",
"string_sets_for_first_park.dataframe"
]
},
{
@@ -207,9 +225,11 @@
"metadata": {},
"outputs": [],
"source": [
"# Ancestors of an object type\n",
"obj_ancestors = model.get_object_ancestors(\"TrackerType\", get_ids_from_dataframe(strings), \"PV_Serves\", return_format=\"dataframe\")\n",
"obj_ancestors "
"# Ancestors of an object type, get all trackers that are ancestor of the parks string sets\n",
"\n",
"trackers_as_json = model.get_object_ancestors(\"TrackerType\", string_sets_for_first_park.list_of_ids(), \"PV_Serves\")\n",
"trackers = AnalyticsHelper(trackers_as_json)\n",
"trackers.variables_as_dataframe()"
]
},
{
@@ -226,7 +246,7 @@
"outputs": [],
"source": [
"# Live value data of trackers \n",
"live_value = tsdata.get_live_values_data(['AngleMeasured', 'AngleSetpoint'], obj_ancestors)\n",
"live_value = tsdata.get_live_values_data(['AngleMeasured', 'AngleSetpoint'], trackers.dataframe)\n",
"live_value"
]
},
@@ -237,7 +257,7 @@
"outputs": [],
"source": [
"# 1 day aggregated historical trackers data\n",
"await tsdata.get_agg_hist_value_data(start_time, end_time, pro_interval, agg_name, obj_ancestors, ['AngleMeasured', 'AngleSetpoint'])"
"await tsdata.get_agg_hist_value_data(start_time, end_time, pro_interval, agg_name, trackers.dataframe, ['AngleMeasured', 'AngleSetpoint'])"
]
},
{
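
Exploring_API_Functions.ipynb now exercises the helper's different views of one and the same response instead of the removed normalize_as_dataframe call. Roughly, with the exact column layouts left to the helper since they are not shown in this diff:

    sites = AnalyticsHelper(model.get_objects_of_type("SiteType"))
    sites.dataframe                 # normalised table of the returned objects
    sites.list_of_names()           # plain list of object names
    sites.list_of_ids()             # plain list of object ids
    sites.variables_as_dataframe()  # expanded view of the objects' variables
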
1 change: 1 addition & 0 deletions src/pyprediktormapclient/__init__.py
@@ -1,5 +1,6 @@
import sys
from .shared import *
from .analytics_helper import *
from .model_index import *
from .opc_ua import *

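
Because __init__.py now re-exports the module with a wildcard import, the helper should also be reachable from the package top level, assuming the analytics_helper module exports the class:

    from pyprediktormapclient import AnalyticsHelper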