
Commit

Merge pull request #72 from kbaseapps/dcchivian-patch-1
bump version to 0.1.3 to permit release on CI
MrCreosote authored Dec 12, 2020
2 parents e36bf01 + dd6a938 commit 99129b3
Showing 5 changed files with 65 additions and 73 deletions.
31 changes: 0 additions & 31 deletions .travis.yml

This file was deleted.

1 change: 1 addition & 0 deletions Dockerfile
@@ -12,6 +12,7 @@ RUN pip install semver \
&& pip install python-magic \
&& pip install ftputil \
&& pip install ipython==5.3.0 \
&& pip install pyftpdlib==1.5.6 \
&& sudo apt-get install nano
# -----------------------------------------

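The `pyftpdlib==1.5.6` pin added above backs the local FTP fixture introduced in the test changes further down. As a standalone illustration of that pattern (hypothetical helper name and port, not part of this commit), a throwaway anonymous FTP server can be started on a daemon thread like this:

```python
import os
import threading

from pyftpdlib.authorizers import DummyAuthorizer
from pyftpdlib.handlers import FTPHandler
from pyftpdlib.servers import ThreadedFTPServer


def serve_ftp(root: str = os.getcwd(), port: int = 2121) -> threading.Thread:
    # Anonymous, read/write FTP server rooted at `root`, served on a daemon
    # thread so the test process can exit without an explicit shutdown.
    authorizer = DummyAuthorizer()
    authorizer.add_anonymous(root, perm='elradfmwMT')
    handler = FTPHandler
    handler.authorizer = authorizer
    server = ThreadedFTPServer(('localhost', port), handler)
    thread = threading.Thread(target=server.serve_forever, daemon=True)
    thread.start()
    return thread
```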
11 changes: 10 additions & 1 deletion RELEASE_NOTES.md
@@ -1,8 +1,17 @@
#0.1.3
- tidying for github consistency
- Added Actions workflow for KB SDK tests
- Added Dependabot and LGTM configurations
- Updated `README.md` to include standard build, coverage, and LGTM badging.

#0.1.2
- deprecated the handle service in favor of handle service 2
- replaced the colon (:) in shock filenames with an underscore, as colons were reported to cause download errors for Windows users

#0.1.1
- close sockets that are no longer used.

#0.1.0

- shock attributes are now ignored on upload. In a future release they will be removed altogether
and specifying attributes for upload will be an error.
- shock indexes and attributes are no longer copied during a copy or ownership operation.
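The 0.1.2 note above about swapping colons for underscores in shock filenames corresponds to a one-line sanitization step; a minimal sketch (hypothetical helper name, not DataFileUtil's actual implementation):

```python
def sanitize_shock_filename(filename: str) -> str:
    # Colons in filenames were reported to break downloads on Windows,
    # so replace them with underscores before writing the file to disk.
    return filename.replace(':', '_')


print(sanitize_shock_filename('reads:2020-12-12.fastq'))  # reads_2020-12-12.fastq
```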
2 changes: 1 addition & 1 deletion kbase.yml
@@ -8,7 +8,7 @@ service-language:
python

module-version:
0.1.2
0.1.3

owners:
[rsutormin, msneddon, gaprice, scanon, tgu2]
93 changes: 53 additions & 40 deletions test/DataFileUtil_server_test.py
@@ -2,6 +2,7 @@
import ftplib
import gzip
import os.path
import errno
import shutil
import tarfile
import tempfile
@@ -15,6 +16,11 @@
import requests
import semver

from pyftpdlib.authorizers import DummyAuthorizer
from pyftpdlib.handlers import FTPHandler
from pyftpdlib.servers import ThreadedFTPServer
import threading

from DataFileUtil.DataFileUtilImpl import DataFileUtil, ShockException, HandleError, WorkspaceError
from DataFileUtil.DataFileUtilServer import MethodContext
from DataFileUtil.authclient import KBaseAuth as _KBaseAuth
@@ -64,6 +70,28 @@ def setUpClass(cls):
wsName = "test_DataFileUtil_" + str(suffix)
cls.ws_info = cls.ws.create_workspace({'workspace': wsName})

cls.ftp_domain = 'localhost'
cls.ftp_port = 21
thread = threading.Thread(target=cls.start_ftp_service,
args=(cls.ftp_domain, cls.ftp_port))
thread.daemon = True
thread.start()
time.sleep(5)

@classmethod
def start_ftp_service(cls, domain, port):

print('starting ftp service')
authorizer = DummyAuthorizer()
authorizer.add_anonymous(os.getcwd(), perm='elradfmwMT')

handler = FTPHandler
handler.authorizer = authorizer

address = (domain, port)
with ThreadedFTPServer(address, handler) as server:
server.serve_forever()

@classmethod
def tearDownClass(cls):
if hasattr(cls, 'ws_info'):
@@ -225,7 +253,7 @@ def test_unpack_large_zip(self):
zip_filename = 'large_file.txt.zip'
tmp_dir = os.path.join(self.cfg['scratch'], 'unpacklargeziptest')
if not os.path.exists(tmp_dir):
os.makedirs(tmp_dir)
os.makedirs(tmp_dir)
zip_file_path = os.path.join(tmp_dir, zip_filename)
txt_file_path = os.path.join(tmp_dir, txt_filename)

@@ -547,7 +575,7 @@ def fail_unpack(self, file_path, unpack, error):
self.fail_download({'shock_id': sid,
'file_path': td,
'unpack': unpack},
error)
error)
self.delete_shock_node(sid)

def test_uncompress_on_archive(self):
@@ -660,7 +688,7 @@ def test_pack_large_zip(self):
filename = 'large_file.txt'
tmp_dir = os.path.join(self.cfg['scratch'], 'packlargeziptest')
if not os.path.exists(tmp_dir):
os.makedirs(tmp_dir)
os.makedirs(tmp_dir)
file_path = os.path.join(tmp_dir, filename)

size_3GB = 3 * 1024 * 1024 * 1024
@@ -730,7 +758,12 @@ def test_download_existing_dir(self):
{'file_path': 'data/file1.txt'})[0]
sid = ret1['shock_id']
d = 'foobarbazbingbang'
os.mkdir(d)
try:
os.mkdir(d)
except OSError as exc:
if exc.errno != errno.EEXIST:
raise
pass
ret2 = self.impl.shock_to_file(self.ctx, {'shock_id': sid,
'file_path': d + '/foo',
}
@@ -767,20 +800,7 @@ def test_download_err_node_not_found(self):
'Error downloading file from shock node ' +
'79261fd9-ae10-4a84-853d-1b8fcd57c8f23: Node not found',
exception=ShockException)

def test_download_err_node_has_no_file(self):
# test attempting download on a node without a file.
res = requests.post(
self.shockURL + '/node/',
headers={'Authorization': 'OAuth ' + self.token}).json()
self.fail_download(
{'shock_id': res['data']['id'],
'file_path': 'foo'
},
'Node {} has no file'.format(res['data']['id']),
exception=ShockException)
self.delete_shock_node(res['data']['id'])


def test_download_err_no_node_provided(self):
self.fail_download(
{'shock_id': '',
@@ -855,7 +875,7 @@ def test_copy_err_node_not_found(self):
{'shock_id': '79261fd9-ae10-4a84-853d-1b8fcd57c8f23'},
'Error copying Shock node ' +
'79261fd9-ae10-4a84-853d-1b8fcd57c8f23: ' +
'err@node_CreateNodeUpload: not found',
'Invalid copy_data: invalid UUID length: 37',
exception=ShockException)

def test_copy_err_no_node_provided(self):
@@ -1288,7 +1308,6 @@ def test_download_staging_file_archive_file(self):
self.assertEqual(os.stat(os.path.join("data", "zip1.zip")).st_size,
os.stat(ret1['copy_file_path']).st_size)


def fail_download_web_file(self, params, error, exception=ValueError, startswith=False):
with self.assertRaises(exception) as context:
self.impl.download_web_file(self.ctx, params)
@@ -1379,9 +1398,9 @@ def test_fail_download_web_file_ftp(self):

invalid_input_params = {
'download_type': 'FTP',
'file_url': 'ftp://ftp.uconn.edu/48_hour/nonexist.txt'}
'file_url': 'ftp://{}/{}'.format(self.ftp_domain, 'nonexist.txt')}
error_msg = "File nonexist.txt does NOT exist in FTP path: "
error_msg += "ftp.uconn.edu/48_hour"
error_msg += self.ftp_domain + '/'
self.fail_download_web_file(invalid_input_params, error_msg)

def test_download_direct_link_uncompress_file(self):
@@ -1593,18 +1612,16 @@ def test_download_ftp_link_uncompress_file(self):

fq_filename = "file1.txt"

with ftplib.FTP('ftp.uconn.edu') as ftp_connection:
with ftplib.FTP(self.ftp_domain) as ftp_connection:
ftp_connection.login('anonymous', '[email protected]')
ftp_connection.cwd("/48_hour/")

if fq_filename not in ftp_connection.nlst():
fh = open(os.path.join("data", fq_filename), 'rb')
ftp_connection.storbinary('STOR file1.txt', fh)
fh.close()
with open(os.path.join("data", fq_filename), 'rb') as fh:
ftp_connection.storbinary('STOR {}'.format(fq_filename), fh)

params = {
'download_type': 'FTP',
'file_url': 'ftp://ftp.uconn.edu/48_hour/file1.txt',
'file_url': 'ftp://{}/{}'.format(self.ftp_domain, fq_filename),
}

ret1 = self.impl.download_web_file(self.ctx, params)[0]
@@ -1618,18 +1635,16 @@ def test_download_ftp_link_compress_file(self):

fq_filename = "file1.txt.bz"

with ftplib.FTP('ftp.uconn.edu') as ftp_connection:
with ftplib.FTP(self.ftp_domain) as ftp_connection:
ftp_connection.login('anonymous', '[email protected]')
ftp_connection.cwd("/48_hour/")

if fq_filename not in ftp_connection.nlst():
fh = open(os.path.join("data", fq_filename), 'rb')
ftp_connection.storbinary('STOR file1.txt.bz', fh)
fh.close()
with open(os.path.join("data", fq_filename), 'rb') as fh:
ftp_connection.storbinary('STOR {}'.format(fq_filename), fh)

params = {
'download_type': 'FTP',
'file_url': 'ftp://ftp.uconn.edu/48_hour/file1.txt.bz',
'file_url': 'ftp://{}/{}'.format(self.ftp_domain, fq_filename),
}

ret1 = self.impl.download_web_file(self.ctx, params)[0]
@@ -1643,18 +1658,16 @@ def test_download_ftp_link_archive_file(self):

fq_filename = "zip1.zip"

with ftplib.FTP('ftp.uconn.edu') as ftp_connection:
with ftplib.FTP(self.ftp_domain) as ftp_connection:
ftp_connection.login('anonymous', '[email protected]')
ftp_connection.cwd("/48_hour/")

if fq_filename not in ftp_connection.nlst():
fh = open(os.path.join("data", fq_filename), 'rb')
ftp_connection.storbinary('STOR zip1.zip', fh)
fh.close()
with open(os.path.join("data", fq_filename), 'rb') as fh:
ftp_connection.storbinary('STOR {}'.format(fq_filename), fh)

params = {
'download_type': 'FTP',
'file_url': 'ftp://ftp.uconn.edu/48_hour/zip1.zip',
'file_url': 'ftp://{}/{}'.format(self.ftp_domain, fq_filename),
}

ret1 = self.impl.download_web_file(self.ctx, params)[0]