
Commit

Merge pull request #157 from atlanticwave-sdx/156-strict-connection-validation-check

validating against time and qos_metrics values
YufengXin authored Jan 13, 2025
2 parents 11bd074 + c1e7795 commit 6f88788
Showing 14 changed files with 193 additions and 30 deletions.
Binary file modified connection_transition.png
1 change: 1 addition & 0 deletions pyproject.toml
@@ -25,6 +25,7 @@ dependencies = [
"networkx",
"transitions",
"fastapi",
"pytz",
"importlib-resources; python_version < '3.9'",
]

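The new pytz dependency backs the timezone-aware scheduling checks added to connectionvalidator.py further down. A minimal sketch of the comparison it enables, using one of the scheduling timestamps from the updated request files:

from datetime import datetime

import pytz

# Naive ISO timestamps from a request are made UTC-aware before being compared
# against the current time, mirroring the validator change below.
start = datetime.fromisoformat("2030-06-24T01:00:00.000").replace(tzinfo=pytz.UTC)
now = datetime.now().replace(tzinfo=pytz.UTC)
print(start > now)  # True until mid-2030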
17 changes: 17 additions & 0 deletions src/sdx_datamodel/connection_sm.py
@@ -52,6 +52,7 @@ class State(Enum):
FAILED = auto()
RECOVERING = auto()
DELETED = auto()
MAINTENANCE = auto()

def __str__(self):
return self.name
@@ -71,6 +72,8 @@ class Trigger(Enum):
RECOVER_SUCCESS = auto()
RECOVER_FAIL = auto()
DELETE = auto()
MAINTENANCE_DOWN = auto()
MAINTENANCE_UP = auto()

def __str__(self):
return self.name
@@ -141,6 +144,16 @@ def __str__(self):
"source": str(State.FAILED),
"dest": str(State.DELETED),
},
{
"trigger": str(Trigger.MAINTENANCE_DOWN),
"source": str(State.PROVISIONED),
"dest": str(State.MAINTENANCE),
},
{
"trigger": str(Trigger.MAINTENANCE_UP),
"source": str(State.MAINTENANCE),
"dest": str(State.PROVISIONED),
},
]

def __init__(self):
@@ -193,6 +206,10 @@ def set_state(self, state):
self.state = state


class ControllerStateMachine(ConnectionStateMachine):
name = "SDX Controller State Machine"


def draw_transition(model, output):
machine = GraphMachine(
model=model,
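A stand-alone sketch of the new maintenance cycle, driving the transitions library directly with the same state and trigger names the diff adds. The real ConnectionStateMachine wires these transitions itself; the DummyConnection model here is only illustrative:

from transitions import Machine


class DummyConnection:
    pass


states = ["PROVISIONED", "MAINTENANCE"]
maintenance_transitions = [
    {"trigger": "MAINTENANCE_DOWN", "source": "PROVISIONED", "dest": "MAINTENANCE"},
    {"trigger": "MAINTENANCE_UP", "source": "MAINTENANCE", "dest": "PROVISIONED"},
]

conn = DummyConnection()
Machine(
    model=conn,
    states=states,
    transitions=maintenance_transitions,
    initial="PROVISIONED",
)

conn.MAINTENANCE_DOWN()  # take a provisioned connection down for maintenance
assert conn.state == "MAINTENANCE"
conn.MAINTENANCE_UP()    # bring it back up afterwards
assert conn.state == "PROVISIONED"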
4 changes: 2 additions & 2 deletions src/sdx_datamodel/data/requests/test-l2vpn-p2p-v2.json
@@ -13,8 +13,8 @@
],
"description": "a test circuit",
"scheduling": {
"start_time": "2024-06-24T01:00:00.000Z",
"end_time": "2024-06-26T01:00:00.000Z"
"start_time": "2030-06-24T01:00:00.000",
"end_time": "2030-06-26T01:00:00.000"
},
"qos_metrics": {
"min_bw": {
(The same scheduling change, with start and end times moved to 2030 and the trailing "Z" dropped, is applied in two more request files whose names are not shown in this view.)
4 changes: 2 additions & 2 deletions src/sdx_datamodel/data/requests/test_request.json
@@ -1,8 +1,8 @@
{
"id": "285eea4b-1e86-4d54-bd75-f14b8cb4a63a",
"name": "Test connection request",
"start_time": "2000-01-23T04:56:07.000Z",
"end_time": "2000-01-23T04:56:07.000Z",
"start_time": "2030-01-23T04:56:07.000",
"end_time": "2030-01-23T04:56:07.000",
"bandwidth_required": 10,
"latency_required": 300,
"egress_port": {
4 changes: 2 additions & 2 deletions src/sdx_datamodel/data/requests/test_request_no_node.json
@@ -1,8 +1,8 @@
{
"id": "id",
"name": "AMLight",
"start_time": "2000-01-23T04:56:07.000Z",
"end_time": "2000-01-23T04:56:07.000Z",
"start_time": "2030-01-23T04:56:07.000",
"end_time": "2030-01-23T04:56:07.000",
"bandwidth_required": 100,
"latency_required": 20,
"egress_port": {
4 changes: 2 additions & 2 deletions src/sdx_datamodel/data/requests/test_request_p2p.json
@@ -1,8 +1,8 @@
{
"id": "id",
"name": "AMLight",
"start_time": "2000-01-23T04:56:07.000Z",
"end_time": "2000-01-23T04:56:07.000Z",
"start_time": "2030-01-23T04:56:07.000",
"end_time": "2030-01-23T04:56:07.000",
"bandwidth_required": 100,
"latency_required": 20,
"egress_port": {
25 changes: 25 additions & 0 deletions src/sdx_datamodel/models/connection.py
@@ -39,6 +39,7 @@ def __init__(
packetloss_measured=None,
availability_required=None,
availability_measured=None,
max_number_oxps=None,
paths=None,
status=None,
complete=False,
@@ -113,6 +114,7 @@ def __init__(
"packetloss_measured": float,
"availability_required": float,
"availability_measured": float,
"max_number_oxps": int,
"paths": List[str],
"status": str,
"complete": bool,
@@ -139,6 +141,7 @@ def __init__(
"packetloss_measured": "packetloss_measured",
"availability_required": "availability_required",
"availability_measured": "availability_measured",
"max_number_oxps": "max_number_oxps",
"paths": "paths",
"status": "status",
"complete": "complete",
@@ -163,6 +166,7 @@ def __init__(
self._packetloss_measured = packetloss_measured
self._availability_required = availability_required
self._availability_measured = availability_measured
self._max_number_oxps = max_number_oxps
self._paths = paths
self._status = status
self._complete = complete
@@ -665,6 +669,27 @@ def availability_measured(self, availability_measured):

self._availability_measured = availability_measured

@property
def max_number_oxps(self):
"""Gets the max_number_oxps of this Connection.
:return: The max_number_oxps of this Connection.
:rtype: int
"""
return self._max_number_oxps

@max_number_oxps.setter
def max_number_oxps(self, max_number_oxps):
"""Sets the max_number_oxps of this Connection.
:param max_number_oxps: The max_number_oxps of this Connection.
:type max_number_oxps: int
"""

self._max_number_oxps = max_number_oxps

@property
def paths(self):
"""Gets the paths of this Connection.
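The model follows the getter/setter property pattern visible above: each field is registered in the type and attribute-name dictionaries and backed by a private attribute. A stripped-down illustration of how the new field behaves; MiniConnection is hypothetical, not the real class:

class MiniConnection:
    # Mirrors the two dictionaries shown in the diff: declared type and serialized name.
    types = {"max_number_oxps": int}
    attribute_map = {"max_number_oxps": "max_number_oxps"}

    def __init__(self, max_number_oxps=None):
        self._max_number_oxps = max_number_oxps

    @property
    def max_number_oxps(self):
        return self._max_number_oxps

    @max_number_oxps.setter
    def max_number_oxps(self, max_number_oxps):
        self._max_number_oxps = max_number_oxps


conn = MiniConnection()
conn.max_number_oxps = 5
print(conn.max_number_oxps)  # 5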
8 changes: 7 additions & 1 deletion src/sdx_datamodel/parsing/connectionhandler.py
@@ -30,6 +30,7 @@ def import_connection_data(self, data: dict) -> Connection:
name = data["name"]
bandwidth_required = None
latency_required = None
max_number_oxps = None
if data.get("endpoints") is not None: # spec version 2.0.0
endpoints = data.get("endpoints")
if len(endpoints) != 2:
@@ -41,9 +42,13 @@
bandwidth_required_obj = qos_metrics.get("min_bw")
if bandwidth_required_obj is not None:
bandwidth_required = bandwidth_required_obj.get("value")
latency_required_obj = qos_metrics.get("max_latency")
latency_required_obj = qos_metrics.get("max_delay")
if latency_required_obj is not None:
latency_required = latency_required_obj.get("value")
if qos_metrics.get("max_number_oxps") is not None:
max_number_oxps = qos_metrics.get("max_number_oxps").get(
"value"
)

scheduling = data.get("scheduling", {})
start_time = scheduling.get("start_time")
@@ -70,6 +75,7 @@
end_time=end_time,
bandwidth_required=bandwidth_required,
latency_required=latency_required,
max_number_oxps=max_number_oxps,
ingress_port=ingress_port,
egress_port=egress_port,
)
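For reference, a sketch of the spec 2.0.0 request fields that import_connection_data reads above. Values are illustrative, and required fields such as id, name, and endpoints are omitted:

request_fragment = {
    "scheduling": {
        "start_time": "2030-06-24T01:00:00.000",
        "end_time": "2030-06-26T01:00:00.000",
    },
    "qos_metrics": {
        "min_bw": {"value": 10},          # -> bandwidth_required
        "max_delay": {"value": 300},      # -> latency_required (key was "max_latency" before this change)
        "max_number_oxps": {"value": 5},  # -> max_number_oxps
    },
}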
94 changes: 79 additions & 15 deletions src/sdx_datamodel/validation/connectionvalidator.py
@@ -3,8 +3,11 @@
"""

import logging
from datetime import datetime
from re import match

import pytz

from sdx_datamodel.models.connection import Connection
from sdx_datamodel.models.port import Port

@@ -55,13 +58,53 @@ def _validate_connection(self, conn: Connection):
:return: A list of any issues in the data.
"""

errors = []
errors += self._validate_object_defaults(conn)
errors += self._validate_port(conn.ingress_port, conn)
errors += self._validate_port(conn.egress_port, conn)

# errors += self._validate_time(conn.start_time, conn)
# errors += self._validate_time(conn.end_time, conn)
if conn.start_time or conn.end_time:
errors += self._validate_time(conn.start_time, conn.end_time, conn)

if conn.latency_required:
errors += self._validate_qos_metrics_value(
"max_delay", conn.latency_required, 1000
)

if conn.bandwidth_required:
errors += self._validate_qos_metrics_value(
"min_bw", conn.bandwidth_required, 100
)

if conn.max_number_oxps:
errors += self._validate_qos_metrics_value(
"max_number_oxps", conn.max_number_oxps, 100
)
return errors

def _validate_qos_metrics_value(self, metric, value, max_value):
"""
Validate that a QoS metric value is within the allowed range.
The value must satisfy the following:
- It must be a number
- It must fall between 0 and max_value
:param metric: Name of the QoS metric being evaluated.
:param value: Requested value for that metric.
:param max_value: Upper bound allowed for the metric.
:return: A list of any issues in the data.
"""
errors = []

if not isinstance(value, int):
errors.append(f"{value} {metric} must be a number")
if not (0 <= value <= max_value):
errors.append(f"{value} {metric} must be between 0 and {max_value}")

return errors

@@ -94,26 +137,47 @@ def _validate_port(self, port: Port, conn: Connection):
"""
return errors

def _validate_time(self, time: str, conn: Connection):
def _validate_time(self, start_time: str, end_time: str, conn: Connection):
"""
Validate that the scheduling times provided are valid.
The scheduling window must satisfy the following:
- start_time and end_time must be valid ISO timestamps
- start_time, if given, cannot be before the current time
- end_time, if given, cannot be before the current time or start_time
:param time: time being validated
:param start_time, end_time: start and end times being validated
:return: A list of any issues in the data.
"""
utc = pytz.UTC
errors = []
if not match(ISO_TIME_FORMAT, time):
errors.append(f"{time} time needs to be in full ISO format")
# if not match(ISO_TIME_FORMAT, time):
# errors.append(f"{time} time needs to be in full ISO format")
if not start_time:
start_time = str(datetime.now())
try:
start_time_obj = datetime.fromisoformat(start_time)
start_time = start_time_obj.replace(tzinfo=utc)
if start_time < datetime.now().replace(tzinfo=utc):
errors.append(
f"{start_time} start_time cannot be before the current time"
)
except ValueError:
errors.append(
f"{start_time} start_time is not in a valid ISO format"
)
if end_time:
try:
end_time_obj = datetime.fromisoformat(end_time)
end_time = end_time_obj.replace(tzinfo=utc)
if (
end_time < datetime.now().replace(tzinfo=utc)
or end_time < start_time
):
errors.append(
f"{end_time} end_time cannot be before the current or start time"
)
except ValueError:
errors.append(
f"{end_time} end_time is not in a valid ISO format"
)

return errors

def _validate_object_defaults(self, sdx_object):
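Illustrative only: _validate_connection above passes a bound of 1000 for max_delay and 100 for min_bw and max_number_oxps. A hypothetical helper reproducing the range check for a quick sanity test:

def qos_in_range(value, max_value):
    # Same rule as _validate_qos_metrics_value: an integer between 0 and max_value.
    return isinstance(value, int) and 0 <= value <= max_value


assert qos_in_range(300, 1000)       # a 300 ms max_delay passes
assert not qos_in_range(150, 100)    # a min_bw of 150 exceeds the 100 limit
assert not qos_in_range(2.5, 1000)   # non-integer values are rejected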
