diff --git a/src/spikeinterface/extractors/mdaextractors.py b/src/spikeinterface/extractors/mdaextractors.py
index fb1ee60a99..e55f6b4a53 100644
--- a/src/spikeinterface/extractors/mdaextractors.py
+++ b/src/spikeinterface/extractors/mdaextractors.py
@@ -442,18 +442,21 @@ def is_url(path):
 
 
 def _download_bytes_to_tmpfile(url, start, end):
-    try:
-        import requests
-    except:
-        raise Exception("Unable to import module: requests")
-    headers = {"Range": "bytes={}-{}".format(start, end - 1)}
-    r = requests.get(url, headers=headers, stream=True)
-    fd, tmp_fname = tempfile.mkstemp()
-    os.close(fd)
-    with open(tmp_fname, "wb") as f:
-        for chunk in r.iter_content(chunk_size=1024):
-            if chunk:
-                f.write(chunk)
+    import requests
+
+    headers = {"Range": f"bytes={start}-{end - 1}"}
+
+    with requests.get(url, headers=headers, stream=True) as r:
+        r.raise_for_status()  # Exposes HTTPError if one occurred
+
+        with tempfile.NamedTemporaryFile(delete=False, mode="wb") as f:
+            for chunk in r.iter_content(chunk_size=1024):
+                if chunk:
+                    f.write(chunk)
+
+        # Store the temp file name for return
+        tmp_fname = f.name
+
     return tmp_fname
 
 
diff --git a/src/spikeinterface/preprocessing/deepinterpolation/generators.py b/src/spikeinterface/preprocessing/deepinterpolation/generators.py
index d63080be41..f3587cb7ec 100644
--- a/src/spikeinterface/preprocessing/deepinterpolation/generators.py
+++ b/src/spikeinterface/preprocessing/deepinterpolation/generators.py
@@ -86,10 +86,11 @@ def __init__(
         sequential_generator_params["total_samples"] = self.total_samples
         sequential_generator_params["pre_post_omission"] = pre_post_omission
 
-        json_fd, json_path = tempfile.mkstemp(suffix=".json")
-        os.close(json_fd)
-        with open(json_path, "w") as f:
+        with tempfile.NamedTemporaryFile(suffix=".json", mode="w", delete=False, dir="/tmp") as f:
             json.dump(sequential_generator_params, f)
+            f.flush()
+            json_path = f.name
+
         super().__init__(json_path)
 
         self._update_end_frame(total_num_samples)
@@ -245,10 +246,11 @@ def __init__(
         sequential_generator_params["total_samples"] = self.total_samples
         sequential_generator_params["pre_post_omission"] = pre_post_omission
 
-        json_fd, json_path = tempfile.mkstemp(suffix=".json")
-        os.close(json_fd)
-        with open(json_path, "w") as f:
+        with tempfile.NamedTemporaryFile(suffix=".json", mode="w", delete=False, dir="/tmp") as f:
             json.dump(sequential_generator_params, f)
+            f.flush()
+            json_path = f.name
+
         super().__init__(json_path)
 
         self._update_end_frame(num_segment_samples)
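
Note (not part of the patch): both files replace tempfile.mkstemp() plus a manual os.close() with tempfile.NamedTemporaryFile(delete=False). A minimal standalone sketch of that pattern follows; the params dict and printed path are illustrative only, not taken from spikeinterface.

import json
import tempfile

# Hypothetical payload standing in for sequential_generator_params.
params = {"total_samples": 100, "pre_post_omission": 1}

# NamedTemporaryFile(delete=False) creates and opens the file in one step,
# so there is no separate file descriptor to close (unlike mkstemp + os.close),
# and the file persists on disk after the with-block exits.
with tempfile.NamedTemporaryFile(suffix=".json", mode="w", delete=False) as f:
    json.dump(params, f)
    f.flush()           # ensure the JSON is written before the path is handed off
    json_path = f.name  # keep the path for later use

print(json_path)  # the caller is responsible for removing the file when done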