Skip to content

Commit

Permalink
Module for Protection Rules, Recovery Plans and Recovery Plan Jobs (#258
Browse files Browse the repository at this point in the history
)

* Design Specs for DR and Protection Policy Info Module

* Protection Policies backend and Recovery Plan Jobs info module

* Recovery Plan module backend code

* Recovery Plan backend changes

* Recovery plan jobs backend module utils code

* formatting

* Add floating IP spec for recovery plans

* protection rules crud tests

* sanity fix & doc

* Add recovery plan and tests

* Info module tests for DR

* Minor fix

* Omit IDP user tests

* User groups test fix

* Minor fixes

* formatting

* Fix Projects, acps and roles idempotency checks, to have more granular checks for updates. Fix tests as well.

* Formatting

* isort fix

* Add DR vm in cleanup

* sanity fix

* multiple test fixes

* fix entity fetch_url for big responses

* Sanity fixes

* Sanity and test fixes

* sanity fixes

* Recovery plan test fixes

* isort fix

* fix py2.7 issue

* Test changes

Co-authored-by: alaa-bish <[email protected]>
  • Loading branch information
bhati-pradeep and alaa-bish authored Aug 26, 2022
1 parent 06461f5 commit 26faae6
Show file tree
Hide file tree
Showing 47 changed files with 4,748 additions and 303 deletions.
2 changes: 1 addition & 1 deletion galaxy.yml
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
namespace: "nutanix"
name: "ncp"
version: "1.4.0"
version: "1.5.0"
readme: "README.md"
authors:
- "Abhishek Chaudhary (@abhimutant)"
Expand Down
8 changes: 7 additions & 1 deletion meta/runtime.yml
Original file line number Diff line number Diff line change
Expand Up @@ -10,8 +10,11 @@ action_groups:
- ntnx_images
- ntnx_image_placement_policy
- ntnx_pbrs
- ntnx_roles
- ntnx_projects
- ntnx_protection_rules
- ntnx_recovery_plans
- ntnx_recovery_plan_jobs
- ntnx_roles
- ntnx_security_rules
- ntnx_service_groups
- ntnx_static_routes
Expand All @@ -32,6 +35,9 @@ action_groups:
- ntnx_pbrs_info
- ntnx_permissions_info
- ntnx_projects_info
- ntnx_protection_rules_info
- ntnx_recovery_plans_info
- ntnx_recovery_plan_jobs_info
- ntnx_roles_info
- ntnx_security_rules_info
- ntnx_service_groups_info
Expand Down
83 changes: 48 additions & 35 deletions plugins/module_utils/entity.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,11 +2,6 @@
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function

try:
from http.client import IncompleteRead
except ImportError:
from httplib import IncompleteRead # python2

__metaclass__ = type

import copy
Expand Down Expand Up @@ -314,10 +309,9 @@ def _build_url_with_query(self, url, query):
def _fetch_url(
self, url, method, data=None, raise_error=True, no_response=False, timeout=30
):

# only jsonify if content-type supports it; added to avoid issues in case of form-url-encoded type data
if self.headers["Content-Type"] == "application/json":
data = self.module.jsonify(data) if data else None
if self.headers["Content-Type"] == "application/json" and data is not None:
data = self.module.jsonify(data)

resp, info = fetch_url(
self.module,
Expand All @@ -330,40 +324,59 @@ def _fetch_url(
)

status_code = info.get("status")
body = resp.read() if resp else info.get("body")
while True:
try:
resp_json = json.loads(to_text(body)) if body else None
except ValueError:
resp_json = None
except IncompleteRead:
continue

if not raise_error:
return resp_json
body = None

if status_code >= 300:
err = info.get("msg", "Status code != 2xx")
self.module.fail_json(
msg="Failed fetching URL: {0}".format(url),
status_code=status_code,
error=err,
response=resp_json,
)
# buffer size with ref. to max read size of http.client.HTTPResponse.read() definition
buffer_size = 65536
if not resp:
# get body containing error
body = info.get("body")
else:
# For case when response body size is > 65536, read() will fail due to http.client.IncompleteRead exception
# This eventually closes connection and can't read response further.
# So here we read all content in chunks (of size < 65536) and combine data at last to get final response.
resp_chunk = None
resp_chunks = []
while True:
resp_chunk = resp.read(buffer_size)
if resp_chunk:
resp_chunks.append(to_text(resp_chunk.decode("utf-8")))
else:
break

if no_response:
return {"status_code": status_code}
body = "".join(resp_chunks)

if not resp_json:
self.module.fail_json(
msg="Failed to convert API response to json",
status_code=status_code,
error=body,
response=resp_json,
)
try:
resp_json = json.loads(to_text(body)) if body else None
except ValueError:
resp_json = None

if not raise_error:
return resp_json

if status_code >= 300:
err = info.get("msg", "Status code != 2xx")
self.module.fail_json(
msg="Failed fetching URL: {0}".format(url),
status_code=status_code,
error=err,
response=resp_json,
)

if no_response:
return {"status_code": status_code}

if not resp_json:
self.module.fail_json(
msg="Failed to convert API response to json",
status_code=status_code,
error=body,
response=resp_json,
)

return resp_json

# upload file in chunks to the given url
def _upload_file(
self, url, source, method, raise_error=True, no_response=False, timeout=30
Expand Down
141 changes: 141 additions & 0 deletions plugins/module_utils/prism/protection_rules.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,141 @@
# This file is part of Ansible
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function

from copy import deepcopy

from ..utils import convert_to_secs
from .prism import Prism

__metaclass__ = type


class ProtectionRule(Prism):
    """Client for the Prism Central ``/protection_rules`` v3 API.

    Translates Ansible module parameters into protection-rule request
    payloads via the ``build_spec_methods`` mapping (param name ->
    spec-builder method). Each builder returns ``(payload, error)`` where
    ``error`` is ``None`` on success.
    """

    def __init__(self, module):
        resource_type = "/protection_rules"
        super(ProtectionRule, self).__init__(module, resource_type=resource_type)
        # Map module params to the builder that injects them into the spec.
        self.build_spec_methods = {
            "name": self._build_spec_name,
            "desc": self._build_spec_desc,
            "start_time": self._build_spec_start_time,
            "protected_categories": self._build_spec_protected_categories,
            "schedules": self._build_spec_schedules,
        }

    def get_affected_entities(self, rule_uuid):
        """Return the entities protected by the rule with UUID *rule_uuid*."""
        return self.read(uuid=rule_uuid, endpoint="query_entities")

    def _get_default_spec(self):
        """Return a fresh skeleton v3 protection-rule payload.

        deepcopy so callers can mutate the returned dict safely.
        """
        return deepcopy(
            {
                "api_version": "3.1.0",
                "metadata": {"kind": "protection_rule"},
                "spec": {
                    "resources": {
                        "availability_zone_connectivity_list": [],
                        "ordered_availability_zone_list": [],
                        "category_filter": {
                            "params": {},
                            "type": "CATEGORIES_MATCH_ANY",
                        },
                        "primary_location_list": [],
                    },
                    "name": None,
                },
            }
        )

    def _build_spec_name(self, payload, name):
        """Set the rule name."""
        payload["spec"]["name"] = name
        return payload, None

    def _build_spec_desc(self, payload, desc):
        """Set the rule description."""
        payload["spec"]["description"] = desc
        return payload, None

    def _build_spec_start_time(self, payload, start_time):
        """Set the schedule start time."""
        payload["spec"]["resources"]["start_time"] = start_time
        return payload, None

    def _build_spec_protected_categories(self, payload, categories):
        """Set the category filter selecting the entities to protect."""
        payload["spec"]["resources"]["category_filter"]["params"] = categories
        return payload, None

    def _build_spec_schedules(self, payload, schedules):
        """Build availability-zone ordering and connectivity from *schedules*.

        Returns ``(payload, None)`` on success or ``(None, error_message)``
        when required schedule fields are missing.
        """
        ordered_az_list = []
        az_connectivity_list = []

        if self.module.params.get("primary_site"):
            ordered_az_list.append(self.module.params["primary_site"])
        elif len(payload["spec"]["resources"]["primary_location_list"]) == 0:
            return None, "Please provide primary_site spec"

        # create ordered_availability_zone_list (each AZ appears once, in
        # first-seen order; indexes into this list are used below)
        for schedule in schedules:
            if schedule.get("source") and schedule["source"] not in ordered_az_list:
                ordered_az_list.append(schedule["source"])
            if (
                schedule.get("destination")
                and schedule["destination"] not in ordered_az_list
            ):
                ordered_az_list.append(schedule["destination"])
        payload["spec"]["resources"]["ordered_availability_zone_list"] = ordered_az_list

        if self.module.params.get("primary_site"):
            payload["spec"]["resources"]["primary_location_list"] = [
                ordered_az_list.index(self.module.params["primary_site"])
            ]

        # create availability_zone_connectivity_list from schedules
        for schedule in schedules:
            az_connection_spec = {}
            spec = {}
            if schedule.get("source"):
                az_connection_spec[
                    "source_availability_zone_index"
                ] = ordered_az_list.index(schedule["source"])
            if schedule.get("destination"):
                az_connection_spec[
                    "destination_availability_zone_index"
                ] = ordered_az_list.index(schedule["destination"])

            if schedule["protection_type"] == "ASYNC":
                # All of rpo, rpo_unit, snapshot_type and at least one
                # retention policy are mandatory for async schedules.
                # (Previously the check only fired when rpo/rpo_unit were
                # missing while the other fields were present, letting
                # incomplete schedules fall through to a KeyError below.)
                if not (
                    schedule.get("rpo")
                    and schedule.get("rpo_unit")
                    and schedule.get("snapshot_type")
                    and (
                        schedule.get("local_retention_policy")
                        or schedule.get("remote_retention_policy")
                    )
                ):
                    return (
                        None,
                        "rpo, rpo_unit, snapshot_type and atleast one policy are required fields for aysynchronous snapshot schedule",
                    )

                spec["recovery_point_objective_secs"], err = convert_to_secs(
                    schedule["rpo"], schedule["rpo_unit"]
                )
                if err:
                    return None, err

                spec["snapshot_type"] = schedule["snapshot_type"]
                if schedule.get("local_retention_policy"):
                    spec["local_snapshot_retention_policy"] = schedule[
                        "local_retention_policy"
                    ]
                if schedule.get("remote_retention_policy"):
                    spec["remote_snapshot_retention_policy"] = schedule[
                        "remote_retention_policy"
                    ]
            else:
                # Synchronous replication: RPO is 0 by definition.
                if schedule.get("auto_suspend_timeout"):
                    spec["auto_suspend_timeout_secs"] = schedule["auto_suspend_timeout"]
                spec["recovery_point_objective_secs"] = 0
            az_connection_spec["snapshot_schedule_list"] = [spec]
            az_connectivity_list.append(az_connection_spec)

        payload["spec"]["resources"][
            "availability_zone_connectivity_list"
        ] = az_connectivity_list
        return payload, None
100 changes: 100 additions & 0 deletions plugins/module_utils/prism/recovery_plan_jobs.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,100 @@
# This file is part of Ansible
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function

from copy import deepcopy

from ..prism.recovery_plans import get_recovery_plan_uuid
from .prism import Prism

__metaclass__ = type


class RecoveryPlanJob(Prism):
    """Client for the Prism Central ``/recovery_plan_jobs`` v3 API.

    Each ``_build_spec_*`` method takes the payload under construction plus
    one module parameter and returns ``(payload, error)``, where ``error``
    is ``None`` unless the parameter could not be resolved.
    """

    def __init__(self, module):
        super(RecoveryPlanJob, self).__init__(
            module, resource_type="/recovery_plan_jobs"
        )
        # module param -> builder that folds it into the request spec
        self.build_spec_methods = {
            "name": self._build_spec_name,
            "recovery_plan": self._build_spec_recovery_plan,
            "failed_site": self._build_spec_failed_site,
            "recovery_site": self._build_spec_recovery_site,
            "action": self._build_spec_action,
            "recovery_reference_time": self._build_spec_recovery_reference_time,
            "ignore_validation_failures": self._build_spec_ignore_validation_failures,
        }
        # supported follow-up actions -> API sub-endpoint
        self.action_endpoints = {"CLEANUP": "cleanup"}

    def perform_action_on_existing_job(self, job_uuid, action):
        """POST *action* (e.g. CLEANUP) against the job with UUID *job_uuid*."""
        endpoint = "{0}/{1}".format(job_uuid, self.action_endpoints[action])
        return self.create(data={}, endpoint=endpoint)

    def _get_default_spec(self):
        """Return a fresh skeleton recovery-plan-job payload."""
        skeleton = {
            "api_version": "3.1.0",
            "metadata": {"kind": "recovery_plan_job"},
            "spec": {
                "resources": {
                    "execution_parameters": {
                        "failed_availability_zone_list": [],
                        "recovery_availability_zone_list": [],
                        "action_type": None,
                    },
                    "recovery_plan_reference": {},
                },
                "name": None,
            },
        }
        return deepcopy(skeleton)

    def _build_spec_name(self, payload, name):
        """Set the job name."""
        payload["spec"]["name"] = name
        return payload, None

    def _build_spec_recovery_plan(self, payload, recovery_plan):
        """Resolve *recovery_plan* to a UUID and attach the reference."""
        uuid, err = get_recovery_plan_uuid(recovery_plan, self.module)
        if err is not None:
            return None, err
        reference = {"uuid": uuid, "kind": "recovery_plan"}
        payload["spec"]["resources"]["recovery_plan_reference"] = reference
        return payload, None

    def _build_spec_failed_site(self, payload, failed_site):
        """Set the availability zone the entities failed over from."""
        zone = {"availability_zone_url": failed_site["url"]}
        cluster = failed_site.get("cluster")
        if cluster:
            zone["cluster_reference_list"] = [{"uuid": cluster}]
        params = payload["spec"]["resources"]["execution_parameters"]
        params["failed_availability_zone_list"] = [zone]
        return payload, None

    def _build_spec_recovery_site(self, payload, recovery_site):
        """Set the availability zone the entities recover to."""
        zone = {"availability_zone_url": recovery_site["url"]}
        cluster = recovery_site.get("cluster")
        if cluster:
            zone["cluster_reference_list"] = [{"uuid": cluster}]
        params = payload["spec"]["resources"]["execution_parameters"]
        params["recovery_availability_zone_list"] = [zone]
        return payload, None

    def _build_spec_action(self, payload, action):
        """Set the job action type (e.g. VALIDATE, MIGRATE, FAILOVER)."""
        params = payload["spec"]["resources"]["execution_parameters"]
        params["action_type"] = action
        return payload, None

    def _build_spec_recovery_reference_time(self, payload, recovery_reference_time):
        """Set the point-in-time reference for recovery."""
        params = payload["spec"]["resources"]["execution_parameters"]
        params["recovery_reference_time"] = recovery_reference_time
        return payload, None

    def _build_spec_ignore_validation_failures(
        self, payload, ignore_validation_failures
    ):
        """Set whether the job continues past validation failures."""
        params = payload["spec"]["resources"]["execution_parameters"]
        params["should_continue_on_validation_failure"] = ignore_validation_failures
        return payload, None
Loading

0 comments on commit 26faae6

Please sign in to comment.