From 5b2d9e06b9ea6d3bb913a9441e23892e2fda33b1 Mon Sep 17 00:00:00 2001 From: Heberto Mayorquin Date: Wed, 22 Nov 2023 12:50:44 +0100 Subject: [PATCH 1/3] fix sam request --- .../extractors/mdaextractors.py | 27 ++++++++++--------- .../deepinterpolation/generators.py | 16 +++++------ 2 files changed, 23 insertions(+), 20 deletions(-) diff --git a/src/spikeinterface/extractors/mdaextractors.py b/src/spikeinterface/extractors/mdaextractors.py index fb1ee60a99..e55f6b4a53 100644 --- a/src/spikeinterface/extractors/mdaextractors.py +++ b/src/spikeinterface/extractors/mdaextractors.py @@ -442,18 +442,21 @@ def is_url(path): def _download_bytes_to_tmpfile(url, start, end): - try: - import requests - except: - raise Exception("Unable to import module: requests") - headers = {"Range": "bytes={}-{}".format(start, end - 1)} - r = requests.get(url, headers=headers, stream=True) - fd, tmp_fname = tempfile.mkstemp() - os.close(fd) - with open(tmp_fname, "wb") as f: - for chunk in r.iter_content(chunk_size=1024): - if chunk: - f.write(chunk) + import requests + + headers = {"Range": f"bytes={start}-{end - 1}"} + + with requests.get(url, headers=headers, stream=True) as r: + r.raise_for_status() # Exposes HTTPError if one occurred + + with tempfile.NamedTemporaryFile(delete=False, mode="wb") as f: + for chunk in r.iter_content(chunk_size=1024): + if chunk: + f.write(chunk) + + # Store the temp file name for return + tmp_fname = f.name + return tmp_fname diff --git a/src/spikeinterface/preprocessing/deepinterpolation/generators.py b/src/spikeinterface/preprocessing/deepinterpolation/generators.py index d63080be41..0807aad8e7 100644 --- a/src/spikeinterface/preprocessing/deepinterpolation/generators.py +++ b/src/spikeinterface/preprocessing/deepinterpolation/generators.py @@ -86,11 +86,11 @@ def __init__( sequential_generator_params["total_samples"] = self.total_samples sequential_generator_params["pre_post_omission"] = pre_post_omission - json_fd, json_path = 
tempfile.mkstemp(suffix=".json") - os.close(json_fd) - with open(json_path, "w") as f: + with tempfile.NamedTemporaryFile(suffix=".json") as f: json.dump(sequential_generator_params, f) - super().__init__(json_path) + f.flush() + json_path = f.name + super().__init__(json_path) self._update_end_frame(total_num_samples) @@ -245,11 +245,11 @@ def __init__( sequential_generator_params["total_samples"] = self.total_samples sequential_generator_params["pre_post_omission"] = pre_post_omission - json_fd, json_path = tempfile.mkstemp(suffix=".json") - os.close(json_fd) - with open(json_path, "w") as f: + with tempfile.NamedTemporaryFile(suffix=".json") as f: json.dump(sequential_generator_params, f) - super().__init__(json_path) + f.flush() + json_path = f.name + super().__init__(json_path) self._update_end_frame(num_segment_samples) # IMPORTANT: this is used for inference, so we don't want to shuffle From 69afd37a3edd952960625fd6dd5ac9d24a6e9805 Mon Sep 17 00:00:00 2001 From: Heberto Mayorquin Date: Wed, 22 Nov 2023 13:45:56 +0100 Subject: [PATCH 2/3] do not delete --- .../preprocessing/deepinterpolation/generators.py | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/src/spikeinterface/preprocessing/deepinterpolation/generators.py b/src/spikeinterface/preprocessing/deepinterpolation/generators.py index 0807aad8e7..1d739948b7 100644 --- a/src/spikeinterface/preprocessing/deepinterpolation/generators.py +++ b/src/spikeinterface/preprocessing/deepinterpolation/generators.py @@ -86,11 +86,12 @@ def __init__( sequential_generator_params["total_samples"] = self.total_samples sequential_generator_params["pre_post_omission"] = pre_post_omission - with tempfile.NamedTemporaryFile(suffix=".json") as f: + with tempfile.NamedTemporaryFile(suffix=".json", delete=False, dir="/tmp") as f: json.dump(sequential_generator_params, f) f.flush() json_path = f.name - super().__init__(json_path) + + super().__init__(json_path) self._update_end_frame(total_num_samples) 
@@ -245,11 +246,12 @@ def __init__( sequential_generator_params["total_samples"] = self.total_samples sequential_generator_params["pre_post_omission"] = pre_post_omission - with tempfile.NamedTemporaryFile(suffix=".json") as f: + with tempfile.NamedTemporaryFile(suffix=".json", delete=False, dir="/tmp") as f: json.dump(sequential_generator_params, f) f.flush() json_path = f.name - super().__init__(json_path) + + super().__init__(json_path) self._update_end_frame(num_segment_samples) # IMPORTANT: this is used for inference, so we don't want to shuffle From 6e204f581b9bb0c306a0a40519458c5ee6ace95a Mon Sep 17 00:00:00 2001 From: Heberto Mayorquin Date: Wed, 22 Nov 2023 14:15:27 +0100 Subject: [PATCH 3/3] write mode --- .../preprocessing/deepinterpolation/generators.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/spikeinterface/preprocessing/deepinterpolation/generators.py b/src/spikeinterface/preprocessing/deepinterpolation/generators.py index 1d739948b7..f3587cb7ec 100644 --- a/src/spikeinterface/preprocessing/deepinterpolation/generators.py +++ b/src/spikeinterface/preprocessing/deepinterpolation/generators.py @@ -86,7 +86,7 @@ def __init__( sequential_generator_params["total_samples"] = self.total_samples sequential_generator_params["pre_post_omission"] = pre_post_omission - with tempfile.NamedTemporaryFile(suffix=".json", delete=False, dir="/tmp") as f: + with tempfile.NamedTemporaryFile(suffix=".json", mode="w", delete=False, dir="/tmp") as f: json.dump(sequential_generator_params, f) f.flush() json_path = f.name @@ -246,7 +246,7 @@ def __init__( sequential_generator_params["total_samples"] = self.total_samples sequential_generator_params["pre_post_omission"] = pre_post_omission - with tempfile.NamedTemporaryFile(suffix=".json", delete=False, dir="/tmp") as f: + with tempfile.NamedTemporaryFile(suffix=".json", mode="w", delete=False, dir="/tmp") as f: json.dump(sequential_generator_params, f) f.flush() json_path = f.name