GH-211 Added fix for zos-jobs
Signed-off-by: Uladzislau <[email protected]>
KUGDev committed Sep 7, 2023
1 parent e88d66c commit bf120c4
Showing 6 changed files with 59 additions and 58 deletions.
12 changes: 12 additions & 0 deletions src/core/zowe/core_for_zowe_sdk/sdk_api.py
@@ -10,6 +10,7 @@
Copyright Contributors to the Zowe Project.
"""

import urllib
from .exceptions import UnsupportedAuthType
from .request_handler import RequestHandler
from .session import Session, ISession
@@ -60,3 +61,14 @@ def _create_custom_request_arguments(self):
dictionary creation
"""
return self.request_arguments.copy()

def _adjust_for_url(self, str_to_adjust):
"""Adjust string to be correct in a URL
Returns
-------
adjusted_str
A string with special characters, acceptable for a URL
"""

return urllib.parse.quote(str_to_adjust) if str_to_adjust is not None else None
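
For reference, a minimal standalone sketch (not part of the commit) of what the new helper is expected to return, using only the urllib.parse.quote defaults; the sample strings mirror the unit tests in this diff:

import urllib.parse

def adjust_for_url(str_to_adjust):
    # Percent-encode characters such as '@', '#' and '$' so the string can be
    # embedded in a z/OSMF URL; None is passed through unchanged.
    return urllib.parse.quote(str_to_adjust) if str_to_adjust is not None else None

print(adjust_for_url("[email protected]#.$HERE"))  # MY.DSN%40.TEST%23.%24HERE
print(adjust_for_url(None))                    # None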
47 changes: 17 additions & 30 deletions src/zos_files/zowe/zos_files_for_zowe_sdk/files.py
@@ -16,7 +16,6 @@
from zowe.zos_files_for_zowe_sdk import exceptions, constants
import os
import shutil
import urllib
from zowe.zos_files_for_zowe_sdk.constants import zos_file_constants, FileType

_ZOWE_FILES_DEFAULT_ENCODING='utf-8'
@@ -48,18 +47,6 @@ def __init__(self, connection):
self.default_headers["Accept-Encoding"] = "gzip"


def __adjust_for_url(self, str_to_adjust):
"""Adjust string to be correct in a URL
Returns
-------
adjusted_str
A string with special characters, acceptable for a URL
"""

return urllib.parse.quote(str_to_adjust) if str_to_adjust is not None else None


def list_files(self, path):
"""Retrieve a list of USS files based on a given pattern.
@@ -130,7 +117,7 @@ def list_dsn(self, name_pattern, return_attributes= False):
A JSON with a list of dataset names (and attributes if specified) matching the given pattern.
"""
custom_args = self._create_custom_request_arguments()
custom_args["params"] = {"dslevel": self.__adjust_for_url(name_pattern)}
custom_args["params"] = {"dslevel": self._adjust_for_url(name_pattern)}
custom_args["url"] = "{}ds".format(self.request_endpoint)


@@ -162,7 +149,7 @@ def list_dsn_members(self, dataset_name, member_pattern=None,
for k,v in additional_parms.items():
url = "{}{}{}={}".format(url,separator,k,v)
separator = '&'
custom_args['url'] = self.__adjust_for_url(url)
custom_args['url'] = self._adjust_for_url(url)
custom_args["headers"]["X-IBM-Max-Items"] = "{}".format(limit)
custom_args["headers"]["X-IBM-Attributes"] = attributes
response_json = self.request_handler.perform_request("GET", custom_args)
@@ -203,7 +190,7 @@ def copy_uss_to_dataset(self, from_filename, to_dataset_name, to_member_name=Non
path_to_member = f"{to_dataset_name}({to_member_name})" if to_member_name else to_dataset_name
custom_args = self._create_custom_request_arguments()
custom_args['json'] = data
custom_args["url"] = "{}ds/{}".format(self.request_endpoint, self.__adjust_for_url(path_to_member))
custom_args["url"] = "{}ds/{}".format(self.request_endpoint, self._adjust_for_url(path_to_member))
response_json = self.request_handler.perform_request("PUT", custom_args, expected_code=[200])
return response_json

@@ -258,7 +245,7 @@ def copy_dataset_or_member(self,from_dataset_name,to_dataset_name,from_member_na

custom_args = self._create_custom_request_arguments()
custom_args['json'] = data
custom_args["url"] = "{}ds/{}".format(self.request_endpoint, self.__adjust_for_url(path_to_member))
custom_args["url"] = "{}ds/{}".format(self.request_endpoint, self._adjust_for_url(path_to_member))
response_json = self.request_handler.perform_request("PUT", custom_args, expected_code=[200])
return response_json

@@ -271,7 +258,7 @@ def get_dsn_content(self, dataset_name):
A JSON with the contents of a given dataset
"""
custom_args = self._create_custom_request_arguments()
custom_args["url"] = "{}ds/{}".format(self.request_endpoint, self.__adjust_for_url(dataset_name))
custom_args["url"] = "{}ds/{}".format(self.request_endpoint, self._adjust_for_url(dataset_name))
response_json = self.request_handler.perform_request("GET", custom_args)
return response_json

@@ -340,7 +327,7 @@ def create_data_set(self, dataset_name, options = {}):
options[opt] = options["lrecl"]

custom_args = self._create_custom_request_arguments()
custom_args["url"] = "{}ds/{}".format(self.request_endpoint, self.__adjust_for_url(dataset_name))
custom_args["url"] = "{}ds/{}".format(self.request_endpoint, self._adjust_for_url(dataset_name))
custom_args["json"] = options
response_json = self.request_handler.perform_request("POST", custom_args, expected_code = [201])
return response_json
@@ -416,7 +403,7 @@ def create_default_data_set(self, dataset_name: str, default_type: str):
"dirblk": 25
}

custom_args["url"] = "{}ds/{}".format(self.request_endpoint, self.__adjust_for_url(dataset_name))
custom_args["url"] = "{}ds/{}".format(self.request_endpoint, self._adjust_for_url(dataset_name))
response_json = self.request_handler.perform_request("POST", custom_args, expected_code=[201])
return response_json

@@ -451,7 +438,7 @@ def get_dsn_content_streamed(self, dataset_name):
A raw socket response
"""
custom_args = self._create_custom_request_arguments()
custom_args["url"] = "{}ds/{}".format(self.request_endpoint, self.__adjust_for_url(dataset_name))
custom_args["url"] = "{}ds/{}".format(self.request_endpoint, self._adjust_for_url(dataset_name))
raw_response = self.request_handler.perform_streamed_request("GET", custom_args)
return raw_response

@@ -470,7 +457,7 @@ def get_dsn_binary_content(self, dataset_name, with_prefixes=False):
The contents of the dataset with no transformation
"""
custom_args = self._create_custom_request_arguments()
custom_args["url"] = "{}ds/{}".format(self.request_endpoint, self.__adjust_for_url(dataset_name))
custom_args["url"] = "{}ds/{}".format(self.request_endpoint, self._adjust_for_url(dataset_name))
custom_args["headers"]["Accept"] = "application/octet-stream"
if with_prefixes:
custom_args["headers"]["X-IBM-Data-Type"] = 'record'
@@ -494,7 +481,7 @@ def get_dsn_binary_content_streamed(self, dataset_name, with_prefixes=False):
The raw socket response
"""
custom_args = self._create_custom_request_arguments()
custom_args["url"] = "{}ds/{}".format(self.request_endpoint, self.__adjust_for_url(dataset_name))
custom_args["url"] = "{}ds/{}".format(self.request_endpoint, self._adjust_for_url(dataset_name))
custom_args["headers"]["Accept"] = "application/octet-stream"
if with_prefixes:
custom_args["headers"]["X-IBM-Data-Type"] = 'record'
@@ -512,7 +499,7 @@ def write_to_dsn(self, dataset_name, data, encoding=_ZOWE_FILES_DEFAULT_ENCODING
A JSON containing the result of the operation
"""
custom_args = self._create_custom_request_arguments()
custom_args["url"] = "{}ds/{}".format(self.request_endpoint, self.__adjust_for_url(dataset_name))
custom_args["url"] = "{}ds/{}".format(self.request_endpoint, self._adjust_for_url(dataset_name))
custom_args["data"] = data
custom_args['headers']['Content-Type'] = 'text/plain; charset={}'.format(encoding)
response_json = self.request_handler.perform_request(
@@ -586,7 +573,7 @@ def delete_data_set(self, dataset_name, volume=None, member_name=None):
url = "{}ds/{}".format(self.request_endpoint, dataset_name)
if volume is not None:
url = "{}ds/-{}/{}".format(self.request_endpoint, volume, dataset_name)
custom_args["url"] = self.__adjust_for_url(url)
custom_args["url"] = self._adjust_for_url(url)
response_json = self.request_handler.perform_request(
"DELETE", custom_args, expected_code=[200, 202, 204])
return response_json
@@ -713,7 +700,7 @@ def recall_migrated_dataset(self, dataset_name: str, wait=False):

custom_args = self._create_custom_request_arguments()
custom_args["json"] = data
custom_args["url"] = "{}ds/{}".format(self.request_endpoint, self.__adjust_for_url(dataset_name))
custom_args["url"] = "{}ds/{}".format(self.request_endpoint, self._adjust_for_url(dataset_name))

response_json = self.request_handler.perform_request("PUT", custom_args, expected_code=[200])
return response_json
@@ -746,7 +733,7 @@ def delete_migrated_data_set(self, dataset_name: str, purge=False, wait=False):

custom_args = self._create_custom_request_arguments()
custom_args["json"] = data
custom_args["url"] = "{}ds/{}".format(self.request_endpoint, self.__adjust_for_url(dataset_name))
custom_args["url"] = "{}ds/{}".format(self.request_endpoint, self._adjust_for_url(dataset_name))

response_json = self.request_handler.perform_request("PUT", custom_args, expected_code=[200])
return response_json
@@ -775,7 +762,7 @@ def migrate_data_set(self, dataset_name: str, wait=False):

custom_args = self._create_custom_request_arguments()
custom_args["json"] = data
custom_args["url"] = "{}ds/{}".format(self.request_endpoint, self.__adjust_for_url(dataset_name))
custom_args["url"] = "{}ds/{}".format(self.request_endpoint, self._adjust_for_url(dataset_name))

response_json = self.request_handler.perform_request("PUT", custom_args, expected_code=[200])
return response_json
@@ -806,7 +793,7 @@ def rename_dataset(self, before_dataset_name: str, after_dataset_name: str):

custom_args = self._create_custom_request_arguments()
custom_args["json"] = data
custom_args["url"] = "{}ds/{}".format(self.request_endpoint, self.__adjust_for_url(after_dataset_name).strip())
custom_args["url"] = "{}ds/{}".format(self.request_endpoint, self._adjust_for_url(after_dataset_name).strip())

response_json = self.request_handler.perform_request("PUT", custom_args, expected_code=[200])
return response_json
@@ -852,7 +839,7 @@ def rename_dataset_member(self, dataset_name: str, before_member_name: str, afte

custom_args = self._create_custom_request_arguments()
custom_args['json'] = data
custom_args["url"] = "{}ds/{}".format(self.request_endpoint, self.__adjust_for_url(path_to_member))
custom_args["url"] = "{}ds/{}".format(self.request_endpoint, self._adjust_for_url(path_to_member))

response_json = self.request_handler.perform_request("PUT", custom_args, expected_code=[200])
return response_json
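
To illustrate the pattern the Files methods now share, a hedged sketch of the URL built for a dataset name with special characters; the host name and dataset name are illustrative, not taken from the SDK:

import urllib.parse

request_endpoint = "https://zosmf.example.com:443/zosmf/restfiles/"  # assumed host
dataset_name = "MY.DSN@.TEST#.$HERE"                                 # illustrative name

# Mirrors the "{}ds/{}".format(...) calls used throughout files.py after this change.
url = "{}ds/{}".format(request_endpoint, urllib.parse.quote(dataset_name))
print(url)
# https://zosmf.example.com:443/zosmf/restfiles/ds/MY.DSN%40.TEST%23.%24HERE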
14 changes: 7 additions & 7 deletions src/zos_jobs/zowe/zos_jobs_for_zowe_sdk/jobs.py
@@ -51,7 +51,7 @@ def get_job_status(self, jobname, jobid):
"""
custom_args = self._create_custom_request_arguments()
job_url = "{}/{}".format(jobname, jobid)
request_url = "{}{}".format(self.request_endpoint, job_url)
request_url = "{}{}".format(self.request_endpoint, self._adjust_for_url(job_url))
custom_args["url"] = request_url
response_json = self.request_handler.perform_request("GET", custom_args)
return response_json
@@ -78,7 +78,7 @@ def cancel_job(self, jobname: str, jobid: str, modify_version="2.0"):

custom_args = self._create_custom_request_arguments()
job_url = "{}/{}".format(jobname, jobid)
request_url = "{}{}".format(self.request_endpoint, job_url)
request_url = "{}{}".format(self.request_endpoint, self._adjust_for_url(job_url))
custom_args["url"] = request_url
custom_args["json"] = {
"request": "cancel",
@@ -110,7 +110,7 @@ def delete_job(self, jobname, jobid, modify_version="2.0"):

custom_args = self._create_custom_request_arguments()
job_url = "{}/{}".format(jobname, jobid)
request_url = "{}{}".format(self.request_endpoint, job_url)
request_url = "{}{}".format(self.request_endpoint, self._adjust_for_url(job_url))
custom_args["url"] = request_url
custom_args["headers"]["X-IBM-Job-Modify-Version"] = modify_version

@@ -121,7 +121,7 @@ def _issue_job_request(self, req: dict, jobname: str, jobid: str, modify_version

custom_args = self._create_custom_request_arguments()
job_url = "{}/{}".format(jobname, jobid)
request_url = "{}{}".format(self.request_endpoint, job_url)
request_url = "{}{}".format(self.request_endpoint, self._adjust_for_url(job_url))
custom_args["url"] = request_url
custom_args["json"] = {
**req,
@@ -317,7 +317,7 @@ def get_spool_files(self, correlator):
"""
custom_args = self._create_custom_request_arguments()
job_url = "{}/files".format(correlator)
request_url = "{}{}".format(self.request_endpoint, job_url)
request_url = "{}{}".format(self.request_endpoint, self._adjust_for_url(job_url))
custom_args["url"] = request_url
response_json = self.request_handler.perform_request("GET", custom_args)
return response_json
@@ -336,7 +336,7 @@ def get_jcl_text(self, correlator):
"""
custom_args = self._create_custom_request_arguments()
job_url = "{}/files/JCL/records".format(correlator)
request_url = "{}{}".format(self.request_endpoint, job_url)
request_url = "{}{}".format(self.request_endpoint, self._adjust_for_url(job_url))
custom_args["url"] = request_url
response_json = self.request_handler.perform_request("GET", custom_args)
return response_json
@@ -360,7 +360,7 @@ def get_spool_file_contents(self, correlator, id):
"""
custom_args = self._create_custom_request_arguments()
job_url = "{}/files/{}/records".format(correlator, id)
request_url = "{}{}".format(self.request_endpoint, job_url)
request_url = "{}{}".format(self.request_endpoint, self._adjust_for_url(job_url))
custom_args["url"] = request_url
response_json = self.request_handler.perform_request("GET", custom_args)
return response_json
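
The same idea applied to the jobs endpoint: a hedged sketch assuming an illustrative job name containing '$' and a mock host; quote()'s default safe='/' keeps the path separator between job name and job ID intact:

import urllib.parse

request_endpoint = "https://zosmf.example.com:443/zosmf/restjobs/jobs/"  # assumed host
jobname, jobid = "TESTJOB$", "JOB00010"                                  # illustrative values

job_url = "{}/{}".format(jobname, jobid)
request_url = "{}{}".format(request_endpoint, urllib.parse.quote(job_url))
print(request_url)
# https://zosmf.example.com:443/zosmf/restjobs/jobs/TESTJOB%24/JOB00010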
24 changes: 5 additions & 19 deletions tests/unit/test_zos_files.py
@@ -20,21 +20,6 @@ def test_object_should_be_instance_of_class(self):
files = Files(self.test_profile)
self.assertIsInstance(files, Files)


def test_adjust_for_url(self):
"""Test dataset name is being adjusted to the correct URL parameter"""

files = Files(self.test_profile)

actual_not_empty = files._Files__adjust_for_url('[email protected]#.$HERE')
expected_not_empty = 'MY.DSN%40.TEST%23.%24HERE'
self.assertEqual(actual_not_empty, expected_not_empty)

actual_none = files._Files__adjust_for_url(None)
expected_none = None
self.assertEqual(actual_none, expected_none)


@mock.patch('requests.Session.send')
def test_delete_uss(self, mock_send_request):
"""Test deleting a directory recursively sends a request"""
@@ -326,10 +311,10 @@ def test_rename_dataset_member_raises_exception(self):
def test_rename_dataset_member_parametrized(self):
"""Test renaming a dataset member with different values"""
test_values = [
(('DSN', "MBROLD", "MBRNEW", "EXCLU"), True),
(('DSN', "MBROLD", "MBRNEW", "SHRW"), True),
(('DSN', "MBROLD$", "MBRNEW", "EXCLU"), True),
(('DSN', "MBROLD#", "MBRNEW", "SHRW"), True),
(('DSN', "MBROLD", "MBRNEW", "INVALID"), False),
(('DATA.SET.NAME', 'MEMBEROLD', 'MEMBERNEW'), True),
(('DATA.SET.@NAME', 'MEMBEROLD', 'MEMBERNEW'), True),
(('DS.NAME', "MONAME", "MNNAME"), True),
]

@@ -353,7 +338,8 @@ def test_rename_dataset_member_parametrized(self):
custom_args = files_test_profile._create_custom_request_arguments()
custom_args["json"] = data
ds_path = "{}({})".format(test_case[0][0], test_case[0][2])
ds_path_adjusted = files_test_profile._Files__adjust_for_url(ds_path)
ds_path_adjusted = files_test_profile._adjust_for_url(ds_path)
self.assertNotRegex(ds_path_adjusted, r'[\$\@\#]')
custom_args["url"] = "https://mock-url.com:443/zosmf/restfiles/ds/{}".format(ds_path_adjusted)
files_test_profile.request_handler.perform_request.assert_called_once_with("PUT", custom_args,
expected_code=[200])
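
The assertNotRegex check added above can be reproduced standalone; a small sketch with an illustrative member path:

import re
import urllib.parse

ds_path = "DATA.SET.@NAME(MEMBERNEW)"        # illustrative path, shaped like the test data
ds_path_adjusted = urllib.parse.quote(ds_path)

# Special characters must be gone once the path has been quoted.
assert re.search(r'[\$\@\#]', ds_path_adjusted) is None
print(ds_path_adjusted)  # DATA.SET.%40NAME%28MEMBERNEW%29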
7 changes: 5 additions & 2 deletions tests/unit/test_zos_jobs.py
@@ -81,7 +81,7 @@ def test_modified_version_error(self, mock_send_request):
def test_cancel_job_modify_version_parameterized(self):
"""Test cancelling a job with different values sends the expected request"""
test_values = [
(("TESTJOB", "JOB00010", "1.0"), True),
(("TESTJOB", "JOB$0010", "1.0"), True),
(("TESTJOBN", "JOB00011", "2.0"), True),
(("TESTJOB", "JOB00012", "2"), False),
(("TESTJOBN", "JOB00113", "3.0"), False),
@@ -100,7 +100,10 @@ def test_cancel_job_modify_version_parameterized(self):
"request": "cancel",
"version": test_case[0][2],
}
custom_args["url"] = "https://mock-url.com:443/zosmf/restjobs/jobs/{}/{}".format(test_case[0][0], test_case[0][1])
job_url = "{}/{}".format(test_case[0][0], test_case[0][1])
job_url_adjusted = jobs_test_object._adjust_for_url(job_url)
self.assertNotRegex(job_url_adjusted, r'\$')
custom_args["url"] = "https://mock-url.com:443/zosmf/restjobs/jobs/{}".format(job_url_adjusted)
jobs_test_object.request_handler.perform_request.assert_called_once_with("PUT", custom_args, expected_code=[202, 200])
else:
with self.assertRaises(ValueError) as e_info:
13 changes: 13 additions & 0 deletions tests/unit/test_zowe_core.py
@@ -125,6 +125,19 @@ def test_should_handle_token_auth(self):
self.token_props["tokenType"] + "=" + self.token_props["tokenValue"],
)

def test_adjust_for_url(self):
"""Test string is being adjusted to the correct URL parameter"""

sdk_api = SdkApi(self.basic_props, self.default_url)

actual_not_empty = sdk_api._adjust_for_url('[email protected]#.$HERE')
expected_not_empty = 'MY.STRING%40.TEST%23.%24HERE'
self.assertEqual(actual_not_empty, expected_not_empty)

actual_none = sdk_api._adjust_for_url(None)
expected_none = None
self.assertEqual(actual_none, expected_none)


class TestRequestHandlerClass(unittest.TestCase):
"""RequestHandler class unit tests."""
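
For completeness, a minimal self-contained unittest sketch mirroring the new core test; it uses a plain stand-in helper instead of SdkApi so it runs without SDK profiles (the helper name is hypothetical):

import unittest
import urllib.parse

def adjust_for_url(s):
    # Stand-in for SdkApi._adjust_for_url, kept free of SDK dependencies.
    return urllib.parse.quote(s) if s is not None else None

class TestAdjustForUrl(unittest.TestCase):
    def test_quotes_special_characters(self):
        self.assertEqual(adjust_for_url("[email protected]#.$HERE"),
                         "MY.STRING%40.TEST%23.%24HERE")

    def test_none_passes_through(self):
        self.assertIsNone(adjust_for_url(None))

if __name__ == "__main__":
    unittest.main()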
