From 5933adf522f41db3f7d40332a50906ba6e48ef4e Mon Sep 17 00:00:00 2001
From: Manasa Venkatakrishnan
Date: Thu, 11 Jan 2024 12:47:43 -0800
Subject: [PATCH 01/12] Updating processor to sanity check deletion

---
 data-workflows/plugin/classifier_adapter.py   |  3 ++
 data-workflows/plugin/processor.py            | 22 +++++++-----
 data-workflows/plugin/tests/test_processor.py | 36 ++++++++++++++++---
 3 files changed, 47 insertions(+), 14 deletions(-)
 create mode 100644 data-workflows/plugin/classifier_adapter.py

diff --git a/data-workflows/plugin/classifier_adapter.py b/data-workflows/plugin/classifier_adapter.py
new file mode 100644
index 000000000..27639d9df
--- /dev/null
+++ b/data-workflows/plugin/classifier_adapter.py
@@ -0,0 +1,3 @@
+
+def is_plugin_live(name: str, version: str) -> bool:
+    pass
diff --git a/data-workflows/plugin/processor.py b/data-workflows/plugin/processor.py
index 697daa868..595e52f7d 100644
--- a/data-workflows/plugin/processor.py
+++ b/data-workflows/plugin/processor.py
@@ -2,6 +2,7 @@
 from concurrent import futures
 from typing import Optional
 
+from plugin.classifier_adapter import is_plugin_live
 from plugin.lambda_adapter import LambdaAdapter
 from nhcommons.models.plugin_utils import PluginMetadataType
 from nhcommons.utils import pypi_adapter
@@ -39,15 +40,18 @@ def _is_new_plugin(plugin_version_pair):
     # update for removed plugins and existing older version of plugins
     for name, version in dynamo_latest_plugins.items():
         pypi_plugin_version = pypi_latest_plugins.get(name)
-        if pypi_plugin_version != version:
-            logger.info(f"Updating old plugin={name} version={version}")
-            put_plugin_metadata(
-                plugin=name,
-                version=version,
-                plugin_metadata_type=PluginMetadataType.PYPI,
-            )
-            if pypi_plugin_version is None:
-                zulip.plugin_no_longer_on_hub(name)
+        if pypi_plugin_version == version or \
+            (pypi_plugin_version is None and is_plugin_live(name, version)):
+            continue
+
+        logger.info(f"Updating old plugin={name} version={version}")
+        put_plugin_metadata(
+            plugin=name,
+            version=version,
+            plugin_metadata_type=PluginMetadataType.PYPI,
+        )
+        if pypi_plugin_version is None:
+            zulip.plugin_no_longer_on_hub(name)
 
 
 def _update_for_new_plugin(name: str, version: str, old_version: Optional[str]) -> None:
diff --git a/data-workflows/plugin/tests/test_processor.py b/data-workflows/plugin/tests/test_processor.py
index c9ae5f0bb..e784d6e6f 100644
--- a/data-workflows/plugin/tests/test_processor.py
+++ b/data-workflows/plugin/tests/test_processor.py
@@ -3,6 +3,7 @@
 import pytest
 
 import plugin.lambda_adapter
+import plugin.classifier_adapter
 import plugin.metadata
 
 from nhcommons.models.plugin_utils import PluginMetadataType as PMType
@@ -68,6 +69,15 @@ def mock_lambda_adapter(self, monkeypatch) -> Mock:
         monkeypatch.setattr(processor, "LambdaAdapter", mock)
         return mock
 
+    @pytest.fixture
+    def mock_classifier_adapter(self, monkeypatch) -> Mock:
+        mock = Mock(
+            side_effect=lambda _, __: self._is_plugin_live,
+            spec=plugin.classifier_adapter,
+        )
+        monkeypatch.setattr(processor, "is_plugin_live", mock)
+        return mock
+
     @pytest.fixture
     def mock_zulip(self, monkeypatch) -> Mock:
         mock = Mock(spec=zulip)
@@ -83,6 +93,7 @@ def setup(
         mock_get_existing_types,
         mock_get_formatted_metadata,
         mock_lambda_adapter,
+        mock_classifier_adapter,
         mock_zulip,
     ) -> None:
         self._get_latest_plugins = mock_get_latest_plugins
@@ -91,6 +102,7 @@ def setup(
         self._get_existing_types = mock_get_existing_types
         self._get_formatted_metadata = mock_get_formatted_metadata
         self._lambda_adapter = mock_lambda_adapter
+        self._classifier_adapter = mock_classifier_adapter
         self._zulip = mock_zulip
 
     @pytest.fixture
@@ -103,6 +115,7 @@ def _verify_calls(
            get_formatted_metadata_called: bool = False,
            lambda_invoked: bool = False,
            put_pm_calls: list = None,
+            classifier_adapter_not_called: bool = True
         ) -> None:
             verify_call(True, self._get_latest_plugins, empty_call_list)
             verify_call(True, self._get_all_plugins, empty_call_list)
@@ -125,6 +138,8 @@
                 default_call_list
                 == self._lambda_adapter.return_value.invoke.call_args_list
             )
+            if classifier_adapter_not_called:
+                self._classifier_adapter.assert_not_called()
 
         return _verify_calls
 
@@ -138,16 +153,27 @@ def test_all_latest_plugin_in_dynamo(self, verify_calls):
         verify_calls()
         self._zulip.assert_not_called()
 
-    def test_stale_plugin_in_dynamo(self, verify_calls):
+    @pytest.mark.parametrize("is_plugin_live", [False, True])
+    def test_stale_plugin_in_dynamo(
+        self,
+        is_plugin_live,
+        verify_calls,
+    ):
         self._dynamo_latest_plugins = {PLUGIN: VERSION, "bar": "2.4.6"}
         self._pypi_latest_plugins = {"bar": "2.4.6"}
-        put_pm_calls = [_create_put_pm_call(PMType.PYPI)]
+        self._is_plugin_live = is_plugin_live
 
         processor.update_plugin()
 
-        verify_calls(put_pm_calls=put_pm_calls)
-        assert len(self._zulip.method_calls) == 1
-        self._zulip.plugin_no_longer_on_hub.assert_called_once_with(PLUGIN)
+        if is_plugin_live:
+            assert len(self._zulip.method_calls) == 0
+            put_pm_calls = []
+        else:
+            assert len(self._zulip.method_calls) == 1
+            self._zulip.plugin_no_longer_on_hub.assert_called_once_with(PLUGIN)
+            put_pm_calls = [_create_put_pm_call(PMType.PYPI)]
+        verify_calls(put_pm_calls=put_pm_calls, classifier_adapter_not_called=False)
+        self._classifier_adapter.assert_called_once_with(PLUGIN, VERSION)
 
     @pytest.mark.parametrize(
         "existing_types, put_pm_data, formatted_metadata",

From 80afe8e62c81bda14228055635834401c343623d Mon Sep 17 00:00:00 2001
From: Manasa Venkatakrishnan
Date: Tue, 16 Jan 2024 11:16:04 -0800
Subject: [PATCH 02/12] Adding Classifier Adapters

---
 data-workflows/plugin/classifier_adapter.py  | 27 +++++++-
 .../plugin/tests/test_classifier_adapter.py  | 65 +++++++++++++++++++
 .../src/nhcommons/utils/adapter_helpers.py   |  6 +-
 3 files changed, 95 insertions(+), 3 deletions(-)
 create mode 100644 data-workflows/plugin/tests/test_classifier_adapter.py

diff --git a/data-workflows/plugin/classifier_adapter.py b/data-workflows/plugin/classifier_adapter.py
index 27639d9df..f8948c02b 100644
--- a/data-workflows/plugin/classifier_adapter.py
+++ b/data-workflows/plugin/classifier_adapter.py
@@ -1,3 +1,28 @@
+import logging
+from functools import cache
+
+from nhcommons.utils.adapter_helpers import GithubClientHelper
+
+
+REPOSITORY = "https://github.com/napari/npe2api"
+FILE_NAME = "public/classifiers.json"
+LOGGER = logging.getLogger(__name__)
+
+
+@cache
+def _get_recent_query_data() -> dict:
+    github_client_helper = GithubClientHelper(REPOSITORY)
+    data = github_client_helper.get_file(FILE_NAME, "json")
+    if not data:
+        raise RuntimeError(f"Unable to fetch {FILE_NAME} from {REPOSITORY}")
+    return data
+
 
 def is_plugin_live(name: str, version: str) -> bool:
-    pass
+    try:
+        recent_query_update = _get_recent_query_data()
+        active_versions = set(recent_query_update.get("active", {}).get(name, []))
+        return version in active_versions
+    except RuntimeError:
+        LOGGER.warning(f"Returning {name} {version} is live due to RuntimeError")
+        return True
diff --git a/data-workflows/plugin/tests/test_classifier_adapter.py b/data-workflows/plugin/tests/test_classifier_adapter.py
new file mode 100644
index 000000000..31668641d
--- /dev/null
+++ b/data-workflows/plugin/tests/test_classifier_adapter.py
@@ -0,0 +1,65 @@
+from unittest.mock import Mock, call
+
+import pytest
+
+import nhcommons.utils.adapter_helpers
+import plugin.classifier_adapter
+
+
+REPOSITORY = "https://github.com/napari/npe2api"
+FILE_NAME = "public/classifiers.json"
+
+
+class TestClassifierAdapter:
+
+    @pytest.fixture(autouse=True)
+    def clear_cache(self):
+        plugin.classifier_adapter._get_recent_query_data.cache_clear()
+
+    @pytest.fixture(autouse=True)
+    def github_client_helper(self, monkeypatch):
+        self._github_client_helper = Mock()
+        self._github_client_helper.get_file.side_effect = lambda _, __: self._classifier_json
+        self._github_client_helper_call = Mock(
+            spec=nhcommons.utils.adapter_helpers.GithubClientHelper,
+            return_value=self._github_client_helper
+        )
+        monkeypatch.setattr(
+            plugin.classifier_adapter,
+            "GithubClientHelper",
+            self._github_client_helper_call
+        )
+
+    def test_handle_valid_query_data(self):
+        self._classifier_json = {"active": {"foo": ["1.0.0", "1.0.1", "1.0.2"]}}
+        assert (True == plugin.classifier_adapter.is_plugin_live("foo", "1.0.2"))
+        assert (False == plugin.classifier_adapter.is_plugin_live("foo", "1.0.4"))
+        assert (False == plugin.classifier_adapter.is_plugin_live("bar", "1.0.4"))
+        self._github_client_helper_call.assert_called_once_with(REPOSITORY)
+        self._github_client_helper.get_file.assert_called_once_with(FILE_NAME, "json")
+
+    def test_handle_invalid_query_data(self):
+        self._classifier_json = {"inactive": []}
+        assert (False == plugin.classifier_adapter.is_plugin_live("foo", "1.0.2"))
+        assert (False == plugin.classifier_adapter.is_plugin_live("bar", "1.0.2"))
+        self._github_client_helper_call.assert_called_once_with(REPOSITORY)
+        self._github_client_helper.get_file.assert_called_once_with(FILE_NAME, "json")
+
+    def test_is_plugin_live_caching(self):
+        self._classifier_json = {"active": {"foo": ["1.0.0", "1.0.1", "1.0.2"]}}
+        assert (True == plugin.classifier_adapter.is_plugin_live("foo", "1.0.2"))
+        assert (False == plugin.classifier_adapter.is_plugin_live("foo", "1.0.4"))
+        self._github_client_helper_call.assert_called_once_with(REPOSITORY)
+        self._github_client_helper.get_file.assert_called_once_with(FILE_NAME, "json")
+
+    def test_is_plugin_live_does_not_cache_error(self):
+        self._classifier_json = None
+        assert (True == plugin.classifier_adapter.is_plugin_live("foo", "1.0.2"))
+        self._classifier_json = {"active": {"foo": ["1.0.0", "1.0.1", "1.0.2"]}}
+        assert (True == plugin.classifier_adapter.is_plugin_live("foo", "1.0.2"))
+        self._github_client_helper_call.assert_has_calls(
+            [call(REPOSITORY), call(REPOSITORY)]
+        )
+        self._github_client_helper.get_file.assert_has_calls(
+            [call(FILE_NAME, "json"), call(FILE_NAME, "json")]
+        )
diff --git a/napari-hub-commons/src/nhcommons/utils/adapter_helpers.py b/napari-hub-commons/src/nhcommons/utils/adapter_helpers.py
index 94c4573cf..4999623c1 100644
--- a/napari-hub-commons/src/nhcommons/utils/adapter_helpers.py
+++ b/napari-hub-commons/src/nhcommons/utils/adapter_helpers.py
@@ -1,5 +1,5 @@
 import logging
-from typing import Dict, List, Optional
+from typing import Dict, List, Optional, Union
 
 import yaml
 from cffconvert import Citation
@@ -47,7 +47,9 @@ def get_first_valid_file(
                 return file
         return None
 
-    def get_file(self, file: str = "", file_format: str = "") -> Optional[str]:
+    def get_file(
+            self, file: str = "", file_format: str = ""
+    ) -> Optional[Union[str, dict]]:
         """
         Get file from github.
         :param file: filename to get if specified

From 6597f4554c6e8aeaf0140d5710ed15f380233050 Mon Sep 17 00:00:00 2001
From: Manasa Venkatakrishnan
Date: Tue, 16 Jan 2024 11:20:09 -0800
Subject: [PATCH 03/12] Adding back Zulip credentials

---
 .happy/terraform/modules/ecs-stack/main.tf | 1 +
 1 file changed, 1 insertion(+)

diff --git a/.happy/terraform/modules/ecs-stack/main.tf b/.happy/terraform/modules/ecs-stack/main.tf
index a7267d440..e27aeeb9c 100644
--- a/.happy/terraform/modules/ecs-stack/main.tf
+++ b/.happy/terraform/modules/ecs-stack/main.tf
@@ -341,6 +341,7 @@ module data_workflows_lambda {
     "GITHUB_CLIENT_ID"      = local.github_client_id
     "GITHUB_CLIENT_SECRET"  = local.github_client_secret
     "PLUGINS_LAMBDA_NAME"   = local.plugins_function_name
+    "ZULIP_CREDENTIALS"     = local.zulip_credentials
   }
 
   log_retention_in_days = local.log_retention_period

From b1c58d8883e230030ea3209a0564a152c5bbdcf1 Mon Sep 17 00:00:00 2001
From: Manasa Venkatakrishnan
Date: Tue, 16 Jan 2024 11:31:07 -0800
Subject: [PATCH 04/12] Fixing linting

---
 data-workflows/plugin/processor.py            |  5 ++--
 .../plugin/tests/test_classifier_adapter.py   | 27 ++++++++++---------
 data-workflows/plugin/tests/test_processor.py |  2 +-
 3 files changed, 18 insertions(+), 16 deletions(-)

diff --git a/data-workflows/plugin/processor.py b/data-workflows/plugin/processor.py
index 595e52f7d..98f13851f 100644
--- a/data-workflows/plugin/processor.py
+++ b/data-workflows/plugin/processor.py
@@ -40,8 +40,9 @@ def _is_new_plugin(plugin_version_pair):
     # update for removed plugins and existing older version of plugins
     for name, version in dynamo_latest_plugins.items():
         pypi_plugin_version = pypi_latest_plugins.get(name)
-        if pypi_plugin_version == version or \
-            (pypi_plugin_version is None and is_plugin_live(name, version)):
+        if pypi_plugin_version == version or (
+            pypi_plugin_version is None and is_plugin_live(name, version)
+        ):
             continue
 
         logger.info(f"Updating old plugin={name} version={version}")
diff --git a/data-workflows/plugin/tests/test_classifier_adapter.py b/data-workflows/plugin/tests/test_classifier_adapter.py
index 31668641d..8312d60dc 100644
--- a/data-workflows/plugin/tests/test_classifier_adapter.py
+++ b/data-workflows/plugin/tests/test_classifier_adapter.py
@@ -11,7 +11,6 @@
 
 
 class TestClassifierAdapter:
-
     @pytest.fixture(autouse=True)
     def clear_cache(self):
         plugin.classifier_adapter._get_recent_query_data.cache_clear()
@@ -19,44 +18,46 @@ def github_client_helper(self, monkeypatch):
         self._github_client_helper = Mock()
-        self._github_client_helper.get_file.side_effect = lambda _, __: self._classifier_json
+        self._github_client_helper.get_file.side_effect = (
+            lambda _, __: self._classifier_json
+        )
         self._github_client_helper_call = Mock(
             spec=nhcommons.utils.adapter_helpers.GithubClientHelper,
-            return_value=self._github_client_helper
+            return_value=self._github_client_helper,
         )
         monkeypatch.setattr(
             plugin.classifier_adapter,
             "GithubClientHelper",
-            self._github_client_helper_call
+            self._github_client_helper_call,
         )
 
     def test_handle_valid_query_data(self):
         self._classifier_json = {"active": {"foo": ["1.0.0", "1.0.1", "1.0.2"]}}
-        assert (True == plugin.classifier_adapter.is_plugin_live("foo", "1.0.2"))
-        assert (False == plugin.classifier_adapter.is_plugin_live("foo", "1.0.4"))
-        assert (False == plugin.classifier_adapter.is_plugin_live("bar", "1.0.4"))
+        assert True == plugin.classifier_adapter.is_plugin_live("foo", "1.0.2")
+        assert False == plugin.classifier_adapter.is_plugin_live("foo", "1.0.4")
+        assert False == plugin.classifier_adapter.is_plugin_live("bar", "1.0.4")
         self._github_client_helper_call.assert_called_once_with(REPOSITORY)
         self._github_client_helper.get_file.assert_called_once_with(FILE_NAME, "json")
 
     def test_handle_invalid_query_data(self):
         self._classifier_json = {"inactive": []}
-        assert (False == plugin.classifier_adapter.is_plugin_live("foo", "1.0.2"))
-        assert (False == plugin.classifier_adapter.is_plugin_live("bar", "1.0.2"))
+        assert False == plugin.classifier_adapter.is_plugin_live("foo", "1.0.2")
+        assert False == plugin.classifier_adapter.is_plugin_live("bar", "1.0.2")
         self._github_client_helper_call.assert_called_once_with(REPOSITORY)
         self._github_client_helper.get_file.assert_called_once_with(FILE_NAME, "json")
 
     def test_is_plugin_live_caching(self):
         self._classifier_json = {"active": {"foo": ["1.0.0", "1.0.1", "1.0.2"]}}
-        assert (True == plugin.classifier_adapter.is_plugin_live("foo", "1.0.2"))
-        assert (False == plugin.classifier_adapter.is_plugin_live("foo", "1.0.4"))
+        assert True == plugin.classifier_adapter.is_plugin_live("foo", "1.0.2")
+        assert False == plugin.classifier_adapter.is_plugin_live("foo", "1.0.4")
         self._github_client_helper_call.assert_called_once_with(REPOSITORY)
         self._github_client_helper.get_file.assert_called_once_with(FILE_NAME, "json")
 
     def test_is_plugin_live_does_not_cache_error(self):
         self._classifier_json = None
-        assert (True == plugin.classifier_adapter.is_plugin_live("foo", "1.0.2"))
+        assert True == plugin.classifier_adapter.is_plugin_live("foo", "1.0.2")
         self._classifier_json = {"active": {"foo": ["1.0.0", "1.0.1", "1.0.2"]}}
-        assert (True == plugin.classifier_adapter.is_plugin_live("foo", "1.0.2"))
+        assert True == plugin.classifier_adapter.is_plugin_live("foo", "1.0.2")
         self._github_client_helper_call.assert_has_calls(
             [call(REPOSITORY), call(REPOSITORY)]
         )
diff --git a/data-workflows/plugin/tests/test_processor.py b/data-workflows/plugin/tests/test_processor.py
index e784d6e6f..1345fdcee 100644
--- a/data-workflows/plugin/tests/test_processor.py
+++ b/data-workflows/plugin/tests/test_processor.py
@@ -115,7 +115,7 @@
            get_formatted_metadata_called: bool = False,
            lambda_invoked: bool = False,
            put_pm_calls: list = None,
-            classifier_adapter_not_called: bool = True
+            classifier_adapter_not_called: bool = True,
         ) -> None:
             verify_call(True, self._get_latest_plugins, empty_call_list)
             verify_call(True, self._get_all_plugins, empty_call_list)

From 122ccbadf8e8da7b1fb52fc32eb3273f7c2efaf4 Mon Sep 17 00:00:00 2001
From: Manasa Venkatakrishnan
Date: Tue, 16 Jan 2024 11:41:26 -0800
Subject: [PATCH 05/12] Fixing linting for napari-hub-commons

---
 napari-hub-commons/src/nhcommons/tests/models/test_plugin.py | 2 +-
 napari-hub-commons/src/nhcommons/utils/adapter_helpers.py    | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/napari-hub-commons/src/nhcommons/tests/models/test_plugin.py b/napari-hub-commons/src/nhcommons/tests/models/test_plugin.py
index 3190538a5..e556e6fd1 100644
--- a/napari-hub-commons/src/nhcommons/tests/models/test_plugin.py
+++ b/napari-hub-commons/src/nhcommons/tests/models/test_plugin.py
@@ -135,7 +135,7 @@ def seed_data(self, put_item):
             "2.2",
             release_date="2023-03-01",
             data=plugin_data("plugin-1", "2.2"),
-            is_latest=True
+            is_latest=True,
         )
         put_item(
             "Plugin-2",
diff --git a/napari-hub-commons/src/nhcommons/utils/adapter_helpers.py b/napari-hub-commons/src/nhcommons/utils/adapter_helpers.py
index 4999623c1..253cea2cb 100644
--- a/napari-hub-commons/src/nhcommons/utils/adapter_helpers.py
+++ b/napari-hub-commons/src/nhcommons/utils/adapter_helpers.py
@@ -48,7 +48,7 @@
         return None
 
     def get_file(
-            self, file: str = "", file_format: str = ""
+        self, file: str = "", file_format: str = ""
     ) -> Optional[Union[str, dict]]:
         """
         Get file from github.

From d9e4039b4dd05c2e13a8b792fdac257ec4a9202e Mon Sep 17 00:00:00 2001
From: Manasa Venkatakrishnan
Date: Wed, 17 Jan 2024 17:31:22 -0800
Subject: [PATCH 06/12] Installing libffi7

---
 .github/workflows/backend-tests.yml      | 1 +
 .github/workflows/dataworkflow-tests.yml | 1 +
 2 files changed, 2 insertions(+)

diff --git a/.github/workflows/backend-tests.yml b/.github/workflows/backend-tests.yml
index 54b926055..43692559a 100644
--- a/.github/workflows/backend-tests.yml
+++ b/.github/workflows/backend-tests.yml
@@ -47,6 +47,7 @@ jobs:
       - name: Install dependencies
        # need npe2 here to test preview page
         run: |
+          sudo apt install libffi7
           pip install --upgrade pip
           pip install -r requirements.txt
           pip install -r test-requirements.txt
diff --git a/.github/workflows/dataworkflow-tests.yml b/.github/workflows/dataworkflow-tests.yml
index 6ef18c007..18b34428b 100644
--- a/.github/workflows/dataworkflow-tests.yml
+++ b/.github/workflows/dataworkflow-tests.yml
@@ -46,6 +46,7 @@ jobs:
 
       - name: Install dependencies
         run: |
+          sudo apt install libffi7
           pip install --upgrade pip
           pip install -r requirements.txt
           pip install -r test-requirements.txt

From b9b3320ae4445d2362f04b617166840572c6f75e Mon Sep 17 00:00:00 2001
From: Manasa Venkatakrishnan
Date: Wed, 17 Jan 2024 17:34:58 -0800
Subject: [PATCH 07/12] Updating ubuntu distros

---
 .github/workflows/backend-tests.yml      | 2 +-
 .github/workflows/dataworkflow-tests.yml | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/.github/workflows/backend-tests.yml b/.github/workflows/backend-tests.yml
index 43692559a..927450c5e 100644
--- a/.github/workflows/backend-tests.yml
+++ b/.github/workflows/backend-tests.yml
@@ -26,7 +26,7 @@ jobs:
   # Runs pytest for backend code
   tests:
     name: Unit test with pytest
-    runs-on: ubuntu-22.04
+    runs-on: ubuntu-latest
 
     steps:
       - name: Checkout Repo
diff --git a/.github/workflows/dataworkflow-tests.yml b/.github/workflows/dataworkflow-tests.yml
index 18b34428b..80afbb2b4 100644
--- a/.github/workflows/dataworkflow-tests.yml
+++ b/.github/workflows/dataworkflow-tests.yml
@@ -26,7 +26,7 @@ jobs:
   # Runs pytest for data-workflows code
   tests:
     name: Unit test with pytest
-    runs-on: ubuntu-22.04
+    runs-on: ubuntu-latest
 
     steps:
       - name: Checkout Repo

From 22ef55cfa3e47aa549ca6fd7a56e85a0483e8060 Mon Sep 17 00:00:00 2001
From: Manasa Venkatakrishnan
Date: Wed, 7 Feb 2024 12:02:38 -0800
Subject: [PATCH 08/12] Updating tests

---
 data-workflows/plugin/classifier_adapter.py   |  4 +--
 data-workflows/plugin/processor.py            | 10 ++++---
 .../plugin/tests/test_classifier_adapter.py   | 29 ++++++++-----------
 data-workflows/plugin/tests/test_processor.py |  2 +-
 .../src/nhcommons/utils/adapter_helpers.py    |  2 +-
 5 files changed, 22 insertions(+), 25 deletions(-)

diff --git a/data-workflows/plugin/classifier_adapter.py b/data-workflows/plugin/classifier_adapter.py
index f8948c02b..70baa8121 100644
--- a/data-workflows/plugin/classifier_adapter.py
+++ b/data-workflows/plugin/classifier_adapter.py
@@ -18,11 +18,11 @@ def _get_recent_query_data() -> dict:
     return data
 
 
-def is_plugin_live(name: str, version: str) -> bool:
+def is_plugin_active(name: str, version: str) -> bool:
     try:
         recent_query_update = _get_recent_query_data()
         active_versions = set(recent_query_update.get("active", {}).get(name, []))
         return version in active_versions
     except RuntimeError:
-        LOGGER.warning(f"Returning {name} {version} is live due to RuntimeError")
+        LOGGER.warning(f"Returning {name} {version} is active due to RuntimeError")
         return True
diff --git a/data-workflows/plugin/processor.py b/data-workflows/plugin/processor.py
index 98f13851f..cfc1a3f23 100644
--- a/data-workflows/plugin/processor.py
+++ b/data-workflows/plugin/processor.py
@@ -2,7 +2,7 @@
 from concurrent import futures
 from typing import Optional
 
-from plugin.classifier_adapter import is_plugin_live
+from plugin.classifier_adapter import is_plugin_active
 from plugin.lambda_adapter import LambdaAdapter
 from nhcommons.models.plugin_utils import PluginMetadataType
 from nhcommons.utils import pypi_adapter
@@ -40,9 +40,11 @@ def _is_new_plugin(plugin_version_pair):
     # update for removed plugins and existing older version of plugins
     for name, version in dynamo_latest_plugins.items():
         pypi_plugin_version = pypi_latest_plugins.get(name)
-        if pypi_plugin_version == version or (
-            pypi_plugin_version is None and is_plugin_live(name, version)
-        ):
+        if pypi_plugin_version == version:
+            continue
+        if pypi_plugin_version is None and is_plugin_active(name, version):
+            logger.info(f"Skipping marking plugin={name} version={version} stale as "
+                        f"the plugin is still active in npe2api")
             continue
 
         logger.info(f"Updating old plugin={name} version={version}")
diff --git a/data-workflows/plugin/tests/test_classifier_adapter.py b/data-workflows/plugin/tests/test_classifier_adapter.py
index 8312d60dc..4c9193008 100644
--- a/data-workflows/plugin/tests/test_classifier_adapter.py
+++ b/data-workflows/plugin/tests/test_classifier_adapter.py
@@ -5,7 +5,6 @@
 import nhcommons.utils.adapter_helpers
 import plugin.classifier_adapter
 
-
 REPOSITORY = "https://github.com/napari/npe2api"
 FILE_NAME = "public/classifiers.json"
 
@@ -33,31 +32,27 @@ def github_client_helper(self, monkeypatch):
 
     def test_handle_valid_query_data(self):
         self._classifier_json = {"active": {"foo": ["1.0.0", "1.0.1", "1.0.2"]}}
-        assert True == plugin.classifier_adapter.is_plugin_live("foo", "1.0.2")
-        assert False == plugin.classifier_adapter.is_plugin_live("foo", "1.0.4")
-        assert False == plugin.classifier_adapter.is_plugin_live("bar", "1.0.4")
+        assert True == plugin.classifier_adapter.is_plugin_active("foo", "1.0.2")
+        assert False == plugin.classifier_adapter.is_plugin_active("foo", "0.0.9")
+        assert False == plugin.classifier_adapter.is_plugin_active("bar", "1.0.4")
         self._github_client_helper_call.assert_called_once_with(REPOSITORY)
         self._github_client_helper.get_file.assert_called_once_with(FILE_NAME, "json")
 
     def test_handle_invalid_query_data(self):
         self._classifier_json = {"inactive": []}
-        assert False == plugin.classifier_adapter.is_plugin_live("foo", "1.0.2")
-        assert False == plugin.classifier_adapter.is_plugin_live("bar", "1.0.2")
+        assert False == plugin.classifier_adapter.is_plugin_active("foo", "1.0.2")
+        assert False == plugin.classifier_adapter.is_plugin_active("bar", "1.0.2")
         self._github_client_helper_call.assert_called_once_with(REPOSITORY)
         self._github_client_helper.get_file.assert_called_once_with(FILE_NAME, "json")
 
-    def test_is_plugin_live_caching(self):
-        self._classifier_json = {"active": {"foo": ["1.0.0", "1.0.1", "1.0.2"]}}
-        assert True == plugin.classifier_adapter.is_plugin_live("foo", "1.0.2")
-        assert False == plugin.classifier_adapter.is_plugin_live("foo", "1.0.4")
-        self._github_client_helper_call.assert_called_once_with(REPOSITORY)
-        self._github_client_helper.get_file.assert_called_once_with(FILE_NAME, "json")
-
-    def test_is_plugin_live_does_not_cache_error(self):
-        self._classifier_json = None
-        assert True == plugin.classifier_adapter.is_plugin_live("foo", "1.0.2")
+    @pytest.mark.parametrize("init_classifier_json", [None, {}])
+    def test_is_plugin_live_does_not_cache_error(self, init_classifier_json):
+        # When the classifier json is not a dict with values, _get_recent_query_data()
+        # should throw a RuntimeError
+        self._classifier_json = init_classifier_json
+        assert True == plugin.classifier_adapter.is_plugin_active("foo", "1.0.2")
         self._classifier_json = {"active": {"foo": ["1.0.0", "1.0.1", "1.0.2"]}}
-        assert True == plugin.classifier_adapter.is_plugin_live("foo", "1.0.2")
+        assert True == plugin.classifier_adapter.is_plugin_active("foo", "1.0.2")
         self._github_client_helper_call.assert_has_calls(
             [call(REPOSITORY), call(REPOSITORY)]
         )
diff --git a/data-workflows/plugin/tests/test_processor.py b/data-workflows/plugin/tests/test_processor.py
index 1345fdcee..b2f49ba93 100644
--- a/data-workflows/plugin/tests/test_processor.py
+++ b/data-workflows/plugin/tests/test_processor.py
@@ -75,7 +75,7 @@ def mock_classifier_adapter(self, monkeypatch) -> Mock:
             side_effect=lambda _, __: self._is_plugin_live,
             spec=plugin.classifier_adapter,
         )
-        monkeypatch.setattr(processor, "is_plugin_live", mock)
+        monkeypatch.setattr(processor, "is_plugin_active", mock)
         return mock
 
     @pytest.fixture
diff --git a/napari-hub-commons/src/nhcommons/utils/adapter_helpers.py b/napari-hub-commons/src/nhcommons/utils/adapter_helpers.py
index 253cea2cb..2c25ca8ae 100644
--- a/napari-hub-commons/src/nhcommons/utils/adapter_helpers.py
+++ b/napari-hub-commons/src/nhcommons/utils/adapter_helpers.py
@@ -63,7 +63,7 @@ def get_file(
             response = get_request(api_url, auth=self._auth)
             return response.json() if file_format == "json" else response.text
         except (HTTPError, JSONDecodeError):
-            logger.error(f"Encountered error fetching {api_url}")
+            logger.error(f"Encountered error fetching {api_url}", exc_info=True)
             return None
 
     @classmethod

From 5b0f4e9e998526cfa5675c745a3fe59ffd4de278 Mon Sep 17 00:00:00 2001
From: Manasa Venkatakrishnan
Date: Wed, 7 Feb 2024 12:10:38 -0800
Subject: [PATCH 09/12] Revert "Installing libffi7"

This reverts commit d9e4039b4dd05c2e13a8b792fdac257ec4a9202e.
---
 .github/workflows/backend-tests.yml      | 1 -
 .github/workflows/dataworkflow-tests.yml | 1 -
 2 files changed, 2 deletions(-)

diff --git a/.github/workflows/backend-tests.yml b/.github/workflows/backend-tests.yml
index 927450c5e..d25dc188b 100644
--- a/.github/workflows/backend-tests.yml
+++ b/.github/workflows/backend-tests.yml
@@ -47,7 +47,6 @@ jobs:
       - name: Install dependencies
        # need npe2 here to test preview page
         run: |
-          sudo apt install libffi7
           pip install --upgrade pip
           pip install -r requirements.txt
           pip install -r test-requirements.txt
diff --git a/.github/workflows/dataworkflow-tests.yml b/.github/workflows/dataworkflow-tests.yml
index 80afbb2b4..138a9fead 100644
--- a/.github/workflows/dataworkflow-tests.yml
+++ b/.github/workflows/dataworkflow-tests.yml
@@ -46,7 +46,6 @@ jobs:
 
       - name: Install dependencies
         run: |
-          sudo apt install libffi7
           pip install --upgrade pip
           pip install -r requirements.txt
           pip install -r test-requirements.txt

From e515c7480eb12cc8f8f315aa3f7f022aefaffe22 Mon Sep 17 00:00:00 2001
From: Manasa Venkatakrishnan
Date: Wed, 7 Feb 2024 12:10:59 -0800
Subject: [PATCH 10/12] Revert "Updating ubuntu distros"

This reverts commit b9b3320ae4445d2362f04b617166840572c6f75e.
---
 .github/workflows/backend-tests.yml      | 2 +-
 .github/workflows/dataworkflow-tests.yml | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/.github/workflows/backend-tests.yml b/.github/workflows/backend-tests.yml
index d25dc188b..54b926055 100644
--- a/.github/workflows/backend-tests.yml
+++ b/.github/workflows/backend-tests.yml
@@ -26,7 +26,7 @@ jobs:
   # Runs pytest for backend code
   tests:
     name: Unit test with pytest
-    runs-on: ubuntu-latest
+    runs-on: ubuntu-22.04
 
     steps:
       - name: Checkout Repo
diff --git a/.github/workflows/dataworkflow-tests.yml b/.github/workflows/dataworkflow-tests.yml
index 138a9fead..6ef18c007 100644
--- a/.github/workflows/dataworkflow-tests.yml
+++ b/.github/workflows/dataworkflow-tests.yml
@@ -26,7 +26,7 @@ jobs:
   # Runs pytest for data-workflows code
   tests:
     name: Unit test with pytest
-    runs-on: ubuntu-latest
+    runs-on: ubuntu-22.04
 
     steps:
       - name: Checkout Repo

From 9a5b8a067c5b4009226c71c437fb28971c409921 Mon Sep 17 00:00:00 2001
From: Manasa Venkatakrishnan
Date: Wed, 7 Feb 2024 12:13:24 -0800
Subject: [PATCH 11/12] Revert "Adding back Zulip credentials"

This reverts commit 6597f4554c6e8aeaf0140d5710ed15f380233050.
---
 .happy/terraform/modules/ecs-stack/main.tf | 1 -
 1 file changed, 1 deletion(-)

diff --git a/.happy/terraform/modules/ecs-stack/main.tf b/.happy/terraform/modules/ecs-stack/main.tf
index e27aeeb9c..a7267d440 100644
--- a/.happy/terraform/modules/ecs-stack/main.tf
+++ b/.happy/terraform/modules/ecs-stack/main.tf
@@ -341,7 +341,6 @@ module data_workflows_lambda {
     "GITHUB_CLIENT_ID"      = local.github_client_id
     "GITHUB_CLIENT_SECRET"  = local.github_client_secret
     "PLUGINS_LAMBDA_NAME"   = local.plugins_function_name
-    "ZULIP_CREDENTIALS"     = local.zulip_credentials
   }
 
   log_retention_in_days = local.log_retention_period

From 59a54a400156dd8cef3ec76a17090833b16918f2 Mon Sep 17 00:00:00 2001
From: Manasa Venkatakrishnan
Date: Wed, 7 Feb 2024 12:15:29 -0800
Subject: [PATCH 12/12] Fix linting

---
 data-workflows/plugin/processor.py | 6 ++++--
 1 file changed, 4 insertions(+), 2 deletions(-)

diff --git a/data-workflows/plugin/processor.py b/data-workflows/plugin/processor.py
index cfc1a3f23..27d44fcef 100644
--- a/data-workflows/plugin/processor.py
+++ b/data-workflows/plugin/processor.py
@@ -43,8 +43,10 @@ def _is_new_plugin(plugin_version_pair):
         if pypi_plugin_version == version:
             continue
         if pypi_plugin_version is None and is_plugin_active(name, version):
-            logger.info(f"Skipping marking plugin={name} version={version} stale as "
-                        f"the plugin is still active in npe2api")
+            logger.info(
+                f"Skipping marking plugin={name} version={version} stale as the "
+                f"plugin is still active in npe2api"
+            )
             continue
 
         logger.info(f"Updating old plugin={name} version={version}")