diff --git a/.github/workflows/combine-dependabot-prs.yml b/.github/workflows/combine-dependabot-prs.yml index d1a9ed85ea..271072f6a2 100644 --- a/.github/workflows/combine-dependabot-prs.yml +++ b/.github/workflows/combine-dependabot-prs.yml @@ -28,6 +28,7 @@ jobs: combine-prs: # The type of runner that the job will run on runs-on: ubuntu-latest + permissions: write-all # Steps represent a sequence of tasks that will be executed as part of the job steps: diff --git a/CHANGELOG.md b/CHANGELOG.md index 631d02aeea..b50dc06980 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -7,6 +7,26 @@ this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.htm +## 0.4.2 (2023-11-04) + +### Features + +- Added the current integration version to the port requests for future features and better debugging (PORT-4310) + +### Bug Fixes + +- Added the `install/prod` command to the integration scaffold template as was intended (PORT-5107) +- Changed the serializing of the port app config so when initializing it there wont be any None or default values displayed in the UI (PORT-5108) + +### Improvements + +- Removed version field from the spec.yml in the scaffolded integration (Version will be taken from the pyproject.toml) (PORT-5107) +- Changed the integration type in spec.yml to be the integration slug when scaffolding a new integration (PORT-5107) +- Added more logs to the ocean package for better debugging of the integration (PORT-4780) +- Seperated `SyncRawMixin` from `SyncRawMixin` (moved `SyncRawMixin` to `core/integrations/mixins/sync_raw.py`) +- Improved code readability for `SyncRawMixin` + + ## 0.4.1 (2023-11-03) ### Bug Fixes diff --git a/poetry.lock b/poetry.lock index 80117fadbe..84e5c688b9 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.5.1 and should not be changed by hand. 
[[package]] name = "anyio" @@ -605,16 +605,6 @@ files = [ {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5bbe06f8eeafd38e5d0a4894ffec89378b6c6a625ff57e3028921f8ff59318ac"}, {file = "MarkupSafe-2.1.3-cp311-cp311-win32.whl", hash = "sha256:dd15ff04ffd7e05ffcb7fe79f1b98041b8ea30ae9234aed2a9168b5797c3effb"}, {file = "MarkupSafe-2.1.3-cp311-cp311-win_amd64.whl", hash = "sha256:134da1eca9ec0ae528110ccc9e48041e0828d79f24121a1a146161103c76e686"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:f698de3fd0c4e6972b92290a45bd9b1536bffe8c6759c62471efaa8acb4c37bc"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:aa57bd9cf8ae831a362185ee444e15a93ecb2e344c8e52e4d721ea3ab6ef1823"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ffcc3f7c66b5f5b7931a5aa68fc9cecc51e685ef90282f4a82f0f5e9b704ad11"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47d4f1c5f80fc62fdd7777d0d40a2e9dda0a05883ab11374334f6c4de38adffd"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1f67c7038d560d92149c060157d623c542173016c4babc0c1913cca0564b9939"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:9aad3c1755095ce347e26488214ef77e0485a3c34a50c5a5e2471dff60b9dd9c"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:14ff806850827afd6b07a5f32bd917fb7f45b046ba40c57abdb636674a8b559c"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8f9293864fe09b8149f0cc42ce56e3f0e54de883a9de90cd427f191c346eb2e1"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-win32.whl", hash = "sha256:715d3562f79d540f251b99ebd6d8baa547118974341db04f5ad06d5ea3eb8007"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-win_amd64.whl", hash = 
"sha256:1b8dd8c3fd14349433c79fa8abeb573a55fc0fdd769133baac1f5e07abf54aeb"}, {file = "MarkupSafe-2.1.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8e254ae696c88d98da6555f5ace2279cf7cd5b3f52be2b5cf97feafe883b58d2"}, {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb0932dc158471523c9637e807d9bfb93e06a95cbf010f1a38b98623b929ef2b"}, {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9402b03f1a1b4dc4c19845e5c749e3ab82d5078d16a2a4c2cd2df62d57bb0707"}, @@ -966,7 +956,6 @@ files = [ {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, - {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, @@ -974,15 +963,8 @@ files = [ {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, - {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, - {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, - {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, - {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, - {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, @@ -999,7 
+981,6 @@ files = [ {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, - {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, @@ -1007,7 +988,6 @@ files = [ {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, - {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, {file = 
"PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, @@ -1128,6 +1108,17 @@ files = [ {file = "text_unidecode-1.3-py2.py3-none-any.whl", hash = "sha256:1311f10e8b895935241623731c2ba64f4c455287888b18189350b67134a822e8"}, ] +[[package]] +name = "tomli" +version = "2.0.1" +description = "A lil' TOML parser" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, + {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, +] + [[package]] name = "tomlkit" version = "0.11.8" @@ -1365,4 +1356,4 @@ cli = ["click", "cookiecutter", "jinja2-time", "rich"] [metadata] lock-version = "2.0" python-versions = "^3.11" -content-hash = "5c8cfb4e252076faad62d24bdd5b03945569977407cf3c96801177e162c902bf" +content-hash = "f3a587dcadd34522382f2c755d9b5d6cab62102ef08089dd2dea604dc4f0fd11" diff --git a/port_ocean/__init__.py b/port_ocean/__init__.py index 2df89084bb..a83242a887 100644 --- a/port_ocean/__init__.py +++ b/port_ocean/__init__.py @@ -1,12 +1,11 @@ import warnings -from importlib.metadata import version + warnings.filterwarnings("ignore", category=FutureWarning) from .ocean import Ocean # noqa: E402 from .run import run # noqa: E402 - -__version__ = version("port-ocean") +from .version import __integration_version__, __version__ # noqa: E402 -__all__ = ["Ocean", "run", "__version__"] +__all__ = ["Ocean", "run", "__version__", "__integration_version__"] diff --git a/port_ocean/cli/commands/sail.py b/port_ocean/cli/commands/sail.py index 237c20687a..36cdc3f684 100644 --- a/port_ocean/cli/commands/sail.py +++ b/port_ocean/cli/commands/sail.py @@ -2,6 +2,7 @@ import click +from port_ocean import __version__, __integration_version__ from port_ocean.cli.commands.main import cli_start, print_logo, console from port_ocean.config.settings import LogLevelType 
@@ -62,6 +63,8 @@ def sail( print_logo() console.print("Setting sail... ⛵️⚓️⛵️⚓️ All hands on deck! ⚓️") + console.print(f"🌊 Ocean version: {__version__}") + console.print(f"🚢 Integration version: {__integration_version__}") override = {} if once: diff --git a/port_ocean/cli/cookiecutter/{{cookiecutter.integration_slug}}/.port/spec.yaml b/port_ocean/cli/cookiecutter/{{cookiecutter.integration_slug}}/.port/spec.yaml index 8e369a3eb2..c309ef4b4b 100644 --- a/port_ocean/cli/cookiecutter/{{cookiecutter.integration_slug}}/.port/spec.yaml +++ b/port_ocean/cli/cookiecutter/{{cookiecutter.integration_slug}}/.port/spec.yaml @@ -1,10 +1,9 @@ -version: v0.1.0 -type: gitlab +type: {{cookiecutter.integration_slug}} description: {{cookiecutter.integration_name}} integration for Port Ocean -icon: Cookiecutter +icon: Cookiecutter # Should be one of the available icons in Port features: - type: exporter - section: Git Providers + section: Git Providers # Should be one of the available sections in Port resources: - kind: - kind: diff --git a/port_ocean/cli/cookiecutter/{{cookiecutter.integration_slug}}/Makefile b/port_ocean/cli/cookiecutter/{{cookiecutter.integration_slug}}/Makefile index 399de69631..9a6067dc1f 100644 --- a/port_ocean/cli/cookiecutter/{{cookiecutter.integration_slug}}/Makefile +++ b/port_ocean/cli/cookiecutter/{{cookiecutter.integration_slug}}/Makefile @@ -38,13 +38,17 @@ define deactivate_virtualenv fi endef -.SILENT: install lint run test clean +.SILENT: install install/prod lint run test clean install: $(call deactivate_virtualenv) && \ $(call install_poetry) && \ poetry install --with dev +install/prod: + $(call install_poetry) && \ + poetry install --without dev --no-root --no-interaction --no-ansi --no-cache + lint: $(ACTIVATE) && \ $(call run_checks,.) 
diff --git a/port_ocean/clients/port/authentication.py b/port_ocean/clients/port/authentication.py index 307b421ea3..5bf4386c66 100644 --- a/port_ocean/clients/port/authentication.py +++ b/port_ocean/clients/port/authentication.py @@ -33,6 +33,7 @@ def __init__( api_url: str, integration_identifier: str, integration_type: str, + integration_version: str, ): self.client = client self.api_url = api_url @@ -40,6 +41,7 @@ def __init__( self.client_secret = client_secret self.integration_identifier = integration_identifier self.integration_type = integration_type + self.integration_version = integration_version self._last_token_object: TokenResponse | None = None async def _get_token(self, client_id: str, client_secret: str) -> TokenResponse: @@ -53,7 +55,7 @@ async def _get_token(self, client_id: str, client_secret: str) -> TokenResponse: return TokenResponse(**response.json()) def user_agent(self, user_agent_type: UserAgentType | None = None) -> str: - user_agent = f"port-ocean/{self.integration_type}/{self.integration_identifier}" + user_agent = f"port-ocean/{self.integration_type}/{self.integration_version}/{self.integration_identifier}" if user_agent_type: user_agent += f"/{user_agent_type.value or UserAgentType.exporter.value}" diff --git a/port_ocean/clients/port/client.py b/port_ocean/clients/port/client.py index a431e545f2..0d4e51f565 100644 --- a/port_ocean/clients/port/client.py +++ b/port_ocean/clients/port/client.py @@ -5,7 +5,6 @@ from port_ocean.clients.port.mixins.entities import EntityClientMixin from port_ocean.clients.port.mixins.integrations import IntegrationClientMixin from port_ocean.clients.port.mixins.migrations import MigrationClientMixin - from port_ocean.clients.port.types import ( KafkaCreds, ) @@ -26,6 +25,7 @@ def __init__( client_secret: str, integration_identifier: str, integration_type: str, + integration_version: str, ): self.api_url = f"{base_url}/v1" self.client = async_client @@ -36,10 +36,11 @@ def __init__( self.api_url, 
integration_identifier, integration_type, + integration_version, ) EntityClientMixin.__init__(self, self.auth, self.client) IntegrationClientMixin.__init__( - self, integration_identifier, self.auth, self.client + self, integration_identifier, integration_version, self.auth, self.client ) BlueprintClientMixin.__init__(self, self.auth, self.client) MigrationClientMixin.__init__(self, self.auth, self.client) diff --git a/port_ocean/clients/port/mixins/integrations.py b/port_ocean/clients/port/mixins/integrations.py index 66071a1812..ac399019a0 100644 --- a/port_ocean/clients/port/mixins/integrations.py +++ b/port_ocean/clients/port/mixins/integrations.py @@ -15,10 +15,12 @@ class IntegrationClientMixin: def __init__( self, integration_identifier: str, + integration_version: str, auth: PortAuthentication, client: httpx.AsyncClient, ): self.integration_identifier = integration_identifier + self.integration_version = integration_version self.auth = auth self.client = client @@ -48,6 +50,7 @@ async def create_integration( json = { "installationId": self.integration_identifier, "installationAppType": _type, + "version": self.integration_version, "changelogDestination": changelog_destination, "config": {}, } @@ -73,6 +76,7 @@ async def patch_integration( json["changelogDestination"] = changelog_destination if port_app_config: json["config"] = port_app_config.to_request() + json["version"] = self.integration_version response = await self.client.patch( f"{self.auth.api_url}/integration/{self.integration_identifier}", @@ -99,6 +103,7 @@ async def initialize_integration( if ( integration["changelogDestination"] != changelog_destination or integration["installationAppType"] != _type + or integration.get("version") != self.integration_version ): await self.patch_integration( _type, changelog_destination, port_app_config diff --git a/port_ocean/core/handlers/port_app_config/models.py b/port_ocean/core/handlers/port_app_config/models.py index 78ae10d716..b546d3a82d 100644 --- 
a/port_ocean/core/handlers/port_app_config/models.py +++ b/port_ocean/core/handlers/port_app_config/models.py @@ -52,7 +52,10 @@ def to_request(self) -> dict[str, Any]: "deleteDependentEntities": self.delete_dependent_entities, "createMissingRelatedEntities": self.create_missing_related_entities, "enableMergeEntity": self.enable_merge_entity, - "resources": [resource.dict(by_alias=True) for resource in self.resources], + "resources": [ + resource.dict(by_alias=True, exclude_none=True, exclude_unset=True) + for resource in self.resources + ], } class Config: diff --git a/port_ocean/core/integrations/mixins/__init__.py b/port_ocean/core/integrations/mixins/__init__.py index 5ea5fe1db8..180e52d3e5 100644 --- a/port_ocean/core/integrations/mixins/__init__.py +++ b/port_ocean/core/integrations/mixins/__init__.py @@ -1,7 +1,7 @@ from .events import EventsMixin from .handler import HandlerMixin -from .sync import SyncRawMixin, SyncMixin - +from .sync import SyncMixin +from .sync_raw import SyncRawMixin __all__ = [ "EventsMixin", diff --git a/port_ocean/core/integrations/mixins/events.py b/port_ocean/core/integrations/mixins/events.py index 07e313f436..c422d05458 100644 --- a/port_ocean/core/integrations/mixins/events.py +++ b/port_ocean/core/integrations/mixins/events.py @@ -1,5 +1,7 @@ from collections import defaultdict +from loguru import logger + from port_ocean.core.ocean_types import ( IntegrationEventsCallbacks, START_EVENT_LISTENER, @@ -26,6 +28,10 @@ def __init__(self) -> None: "resync": defaultdict(list), } + @property + def available_resync_kinds(self) -> list[str]: + return list(self.event_strategy["resync"].keys()) + def on_start(self, func: START_EVENT_LISTENER) -> START_EVENT_LISTENER: """Register a function as a listener for the "start" event. @@ -35,6 +41,7 @@ def on_start(self, func: START_EVENT_LISTENER) -> START_EVENT_LISTENER: Returns: START_EVENT_LISTENER: The input function, unchanged. 
""" + logger.debug(f"Registering {func} as a start event listener") self.event_strategy["start"].append(func) return func @@ -50,5 +57,9 @@ def on_resync( Returns: RESYNC_EVENT_LISTENER: The input function, unchanged. """ + if kind is None: + logger.debug(f"Registering resync event listener any kind") + else: + logger.info(f"Registering resync event listener for kind {kind}") self.event_strategy["resync"][kind].append(func) return func diff --git a/port_ocean/core/integrations/mixins/sync.py b/port_ocean/core/integrations/mixins/sync.py index fd34d15a55..9154a99b74 100644 --- a/port_ocean/core/integrations/mixins/sync.py +++ b/port_ocean/core/integrations/mixins/sync.py @@ -1,32 +1,12 @@ -import asyncio -import inspect -import typing -from typing import Any, Awaitable, Callable - from loguru import logger from port_ocean.clients.port.types import UserAgentType -from port_ocean.context.event import TriggerType, event_context, EventType, event from port_ocean.context.ocean import ocean -from port_ocean.context.resource import resource_context -from port_ocean.core.handlers.port_app_config.models import ResourceConfig -from port_ocean.core.integrations.mixins.events import EventsMixin from port_ocean.core.integrations.mixins.handler import HandlerMixin -from port_ocean.core.integrations.mixins.utils import ( - resync_function_wrapper, - resync_generator_wrapper, -) from port_ocean.core.models import Entity from port_ocean.core.ocean_types import ( - RawEntityDiff, EntityDiff, - RESYNC_RESULT, - RAW_RESULT, - RESYNC_EVENT_LISTENER, - ASYNC_GENERATOR_RESYNC_TYPE, ) -from port_ocean.core.utils import zip_and_sum -from port_ocean.exceptions.core import OceanAbortException class SyncMixin(HandlerMixin): @@ -123,325 +103,3 @@ async def sync( ) logger.info("Finished syncing change") - - -class SyncRawMixin(HandlerMixin, EventsMixin): - """Mixin class for synchronization of raw constructed entities. 
- - This mixin class extends the functionality of HandlerMixin and EventsMixin to provide methods for registering, - unregistering, updating, and syncing raw entities' state changes. - - Note: - Raw entities are entities with a more primitive structure, usually fetched directly from a resource. - """ - - def __init__(self) -> None: - HandlerMixin.__init__(self) - EventsMixin.__init__(self) - - async def _on_resync(self, kind: str) -> RAW_RESULT: - raise NotImplementedError("on_resync must be implemented") - - async def _calculate_raw( - self, raw_diff: list[tuple[ResourceConfig, RawEntityDiff]] - ) -> list[EntityDiff]: - logger.info("Calculating diff in entities between states") - return await asyncio.gather( - *( - self.entity_processor.parse_items(mapping, results) - for mapping, results in raw_diff - ) - ) - - async def _get_resource_raw_results( - self, resource_config: ResourceConfig - ) -> tuple[RESYNC_RESULT, list[Exception]]: - logger.info(f"Fetching {resource_config.kind} resync results") - tasks: list[Awaitable[RAW_RESULT]] = [] - with logger.contextualize(kind=resource_config.kind): - fns: list[RESYNC_EVENT_LISTENER] = [ - *self.event_strategy["resync"][resource_config.kind], - *self.event_strategy["resync"][None], - ] - - if self.__class__._on_resync != SyncRawMixin._on_resync: - fns.append(self._on_resync) - - results: RESYNC_RESULT = [] - for task in fns: - if inspect.isasyncgenfunction(task): - results.append(resync_generator_wrapper(task, resource_config.kind)) - else: - task = typing.cast(Callable[[str], Awaitable[RAW_RESULT]], task) - tasks.append(resync_function_wrapper(task, resource_config.kind)) - - logger.info( - f"Found {len(tasks) + len(results)} resync tasks for {resource_config.kind}" - ) - - results_with_error: list[RAW_RESULT | Exception] = await asyncio.gather( - *tasks, - return_exceptions=True, - ) - results.extend( - sum( - [ - result - for result in results_with_error - if not isinstance(result, Exception) - ], - [], - ) - ) - - 
errors = [ - result for result in results_with_error if isinstance(result, Exception) - ] - - logger.info( - f"Triggered {len(tasks)} tasks for {resource_config.kind}, failed: {len(errors)}" - ) - return results, errors - - async def _register_resource_raw( - self, - resource: ResourceConfig, - results: list[dict[Any, Any]], - user_agent_type: UserAgentType, - ) -> list[Entity]: - objects_diff = await self._calculate_raw( - [ - ( - resource, - { - "before": [], - "after": results, - }, - ) - ] - ) - - entities_after: list[Entity] = objects_diff[0]["after"] - await self.entities_state_applier.upsert(entities_after, user_agent_type) - return entities_after - - async def _unregister_resource_raw( - self, - resource: ResourceConfig, - results: list[dict[Any, Any]], - user_agent_type: UserAgentType, - ) -> list[Entity]: - objects_diff = await self._calculate_raw( - [ - ( - resource, - { - "before": results, - "after": [], - }, - ) - ] - ) - - entities_after: list[Entity] = objects_diff[0]["before"] - await self.entities_state_applier.delete(entities_after, user_agent_type) - logger.info("Finished unregistering change") - return entities_after - - async def _register_in_batches( - self, resource_config: ResourceConfig, user_agent_type: UserAgentType - ) -> tuple[list[Entity], list[Exception]]: - results, errors = await self._get_resource_raw_results(resource_config) - async_generators: list[ASYNC_GENERATOR_RESYNC_TYPE] = [] - raw_results: RAW_RESULT = [] - for result in results: - if isinstance(result, dict): - raw_results.append(result) - else: - async_generators.append(result) - - entities = await self._register_resource_raw( - resource_config, raw_results, user_agent_type - ) - - for generator in async_generators: - try: - async for items in generator: - entities.extend( - await self._register_resource_raw( - resource_config, items, user_agent_type - ) - ) - except* OceanAbortException as error: - errors.append(error) - - logger.info( - f"Finished registering change 
for {len(results)} raw results for kind: {resource_config.kind}. {len(entities)} entities were affected" - ) - return entities, errors - - async def register_raw( - self, - kind: str, - results: list[dict[Any, Any]], - user_agent_type: UserAgentType, - ) -> list[Entity]: - """Register raw entities of a specific kind. - - This method registers raw entities of a specific kind into Port. - - Args: - kind (str): The kind of raw entities being registered. - results (list[dict[Any, Any]]): The raw entity results to be registered. - user_agent_type (UserAgentType): The type of user agent. - - Returns: - list[Entity]: A list of registered entities. - """ - logger.info(f"Registering state for {kind}") - config = await self.port_app_config_handler.get_port_app_config() - resource_mappings = [ - resource for resource in config.resources if resource.kind == kind - ] - - return await asyncio.gather( - *( - self._register_resource_raw(resource, results, user_agent_type) - for resource in resource_mappings - ) - ) - - async def unregister_raw( - self, - kind: str, - results: list[dict[Any, Any]], - user_agent_type: UserAgentType, - ) -> list[Entity]: - """Unregister raw entities of a specific kind. - - This method unregisters raw entities of a specific kind from Port. - - Args: - kind (str): The kind of raw entities being unregistered. - results (list[dict[Any, Any]]): The raw entity results to be unregistered. - user_agent_type (UserAgentType): The type of user agent. - - Returns: - list[Entity]: A list of unregistered entities. 
- """ - logger.info(f"Registering state for {kind}") - config = await self.port_app_config_handler.get_port_app_config() - resource_mappings = [ - resource for resource in config.resources if resource.kind == kind - ] - - return await asyncio.gather( - *( - self._unregister_resource_raw(resource, results, user_agent_type) - for resource in resource_mappings - ) - ) - - async def update_raw_diff( - self, - kind: str, - raw_desired_state: RawEntityDiff, - user_agent_type: UserAgentType, - ) -> None: - """Update the difference in state for raw entities of a specific kind. - - This method updates the difference in state for raw entities of a specific kind. - - Args: - kind (str): The kind of raw entities being updated. - raw_desired_state (RawEntityDiff): The desired state difference of raw entities. - user_agent_type (UserAgentType): The type of user agent. - """ - logger.info(f"Updating state for {kind}") - config = await self.port_app_config_handler.get_port_app_config() - resource_mappings = [ - resource for resource in config.resources if resource.kind == kind - ] - - with logger.contextualize(kind=kind): - logger.info(f"Found {len(resource_mappings)} resources for {kind}") - - objects_diff = await self._calculate_raw( - [(mapping, raw_desired_state) for mapping in resource_mappings] - ) - - entities_before, entities_after = zip_and_sum( - ( - (entities_change["before"], entities_change["after"]) - for entities_change in objects_diff - ) - ) - - await self.entities_state_applier.apply_diff( - {"before": entities_before, "after": entities_after}, user_agent_type - ) - - async def sync_raw_all( - self, - _: dict[Any, Any] | None = None, - trigger_type: TriggerType = "machine", - user_agent_type: UserAgentType = UserAgentType.exporter, - silent: bool = True, - ) -> None: - """Perform a full synchronization of raw entities. - - This method performs a full synchronization of raw entities, including registration, unregistration, - and state updates. 
- - Args: - _ (dict[Any, Any] | None): Unused parameter. - trigger_type (TriggerType): The type of trigger for the synchronization. - user_agent_type (UserAgentType): The type of user agent. - silent (bool): Whether to raise exceptions or handle them silently. - """ - logger.info("Resync was triggered") - async with event_context( - EventType.RESYNC, - trigger_type=trigger_type, - ): - app_config = await self.port_app_config_handler.get_port_app_config() - - entities_at_port = await ocean.port_client.search_entities(user_agent_type) - - creation_results: list[tuple[list[Entity], list[Exception]]] = [] - - try: - for resource in app_config.resources: - # create resource context per resource kind, so resync method could have access to the resource - # config as we might have multiple resources in the same event - async with resource_context(resource): - task = asyncio.get_event_loop().create_task( - self._register_in_batches(resource, user_agent_type) - ) - - event.on_abort(lambda: task.cancel()) - - creation_results.append(await task) - except asyncio.CancelledError as e: - logger.warning("Resync aborted successfully") - else: - flat_created_entities, errors = zip_and_sum(creation_results) or [ - [], - [], - ] - - if errors: - message = f"Resync failed with {len(errors)}. Skipping delete phase due to incomplete state" - error_group = ExceptionGroup( - f"Resync failed with {len(errors)}. 
Skipping delete phase due to incomplete state", - errors, - ) - if not silent: - raise error_group - - logger.error(message, exc_info=error_group) - else: - await self.entities_state_applier.delete_diff( - {"before": entities_at_port, "after": flat_created_entities}, - user_agent_type, - ) diff --git a/port_ocean/core/integrations/mixins/sync_raw.py b/port_ocean/core/integrations/mixins/sync_raw.py new file mode 100644 index 0000000000..398cca9900 --- /dev/null +++ b/port_ocean/core/integrations/mixins/sync_raw.py @@ -0,0 +1,374 @@ +import asyncio +import inspect +import typing +from typing import Callable, Awaitable, Any + +from loguru import logger + +from port_ocean.clients.port.types import UserAgentType +from port_ocean.context.event import TriggerType, event_context, EventType, event +from port_ocean.context.ocean import ocean +from port_ocean.context.resource import resource_context +from port_ocean.core.handlers.port_app_config.models import ResourceConfig +from port_ocean.core.integrations.mixins import HandlerMixin, EventsMixin +from port_ocean.core.integrations.mixins.utils import ( + is_resource_supported, + unsupported_kind_response, + resync_generator_wrapper, + resync_function_wrapper, +) +from port_ocean.core.models import Entity +from port_ocean.core.ocean_types import ( + RAW_RESULT, + RESYNC_RESULT, + RawEntityDiff, + EntityDiff, + ASYNC_GENERATOR_RESYNC_TYPE, +) +from port_ocean.core.utils import zip_and_sum +from port_ocean.exceptions.core import OceanAbortException + + +class SyncRawMixin(HandlerMixin, EventsMixin): + """Mixin class for synchronization of raw constructed entities. + + This mixin class extends the functionality of HandlerMixin and EventsMixin to provide methods for registering, + unregistering, updating, and syncing raw entities' state changes. + + Note: + Raw entities are entities with a more primitive structure, usually fetched directly from a resource. 
+ """ + + def __init__(self) -> None: + HandlerMixin.__init__(self) + EventsMixin.__init__(self) + + async def _on_resync(self, kind: str) -> RAW_RESULT: + raise NotImplementedError("on_resync must be implemented") + + async def _get_resource_raw_results( + self, resource_config: ResourceConfig + ) -> tuple[RESYNC_RESULT, list[Exception]]: + logger.info(f"Fetching {resource_config.kind} resync results") + + if not is_resource_supported( + resource_config.kind, self.event_strategy["resync"] + ): + return unsupported_kind_response( + resource_config.kind, self.available_resync_kinds + ) + + fns = self._collect_resync_functions(resource_config) + + results, errors = await self._execute_resync_tasks(fns, resource_config) + + return results, errors + + def _collect_resync_functions( + self, resource_config: ResourceConfig + ) -> list[Callable[[str], Awaitable[RAW_RESULT]]]: + logger.contextualize(kind=resource_config.kind) + + fns = [ + *self.event_strategy["resync"][resource_config.kind], + *self.event_strategy["resync"][None], + ] + + if self.__class__._on_resync != SyncRawMixin._on_resync: + fns.append(self._on_resync) + + return fns + + async def _execute_resync_tasks( + self, + fns: list[Callable[[str], Awaitable[RAW_RESULT]]], + resource_config: ResourceConfig, + ) -> tuple[RESYNC_RESULT, list[RAW_RESULT | Exception]]: + tasks = [] + results = [] + + for task in fns: + if inspect.isasyncgenfunction(task): + results.append(resync_generator_wrapper(task, resource_config.kind)) + else: + task = typing.cast(Callable[[str], Awaitable[RAW_RESULT]], task) + tasks.append(resync_function_wrapper(task, resource_config.kind)) + + logger.info( + f"Found {len(tasks) + len(results)} resync tasks for {resource_config.kind}" + ) + + results_with_error = await asyncio.gather(*tasks, return_exceptions=True) + results.extend( + sum( + [ + result + for result in results_with_error + if not isinstance(result, Exception) + ], + [], + ) + ) + errors = [ + result for result in 
results_with_error if isinstance(result, Exception) + ] + + logger.info( + f"Triggered {len(tasks)} tasks for {resource_config.kind}, failed: {len(errors)}" + ) + + return results, errors + + async def _calculate_raw( + self, raw_diff: list[tuple[ResourceConfig, RawEntityDiff]] + ) -> list[EntityDiff]: + logger.info("Calculating diff in entities between states") + return await asyncio.gather( + *( + self.entity_processor.parse_items(mapping, results) + for mapping, results in raw_diff + ) + ) + + async def _register_resource_raw( + self, + resource: ResourceConfig, + results: list[dict[Any, Any]], + user_agent_type: UserAgentType, + ) -> list[Entity]: + objects_diff = await self._calculate_raw( + [ + ( + resource, + { + "before": [], + "after": results, + }, + ) + ] + ) + + entities_after: list[Entity] = objects_diff[0]["after"] + await self.entities_state_applier.upsert(entities_after, user_agent_type) + return entities_after + + async def _unregister_resource_raw( + self, + resource: ResourceConfig, + results: list[dict[Any, Any]], + user_agent_type: UserAgentType, + ) -> list[Entity]: + objects_diff = await self._calculate_raw( + [ + ( + resource, + { + "before": results, + "after": [], + }, + ) + ] + ) + + entities_after: list[Entity] = objects_diff[0]["before"] + await self.entities_state_applier.delete(entities_after, user_agent_type) + logger.info("Finished unregistering change") + return entities_after + + async def _register_in_batches( + self, resource_config: ResourceConfig, user_agent_type: UserAgentType + ) -> tuple[list[Entity], list[Exception]]: + results, errors = await self._get_resource_raw_results(resource_config) + async_generators: list[ASYNC_GENERATOR_RESYNC_TYPE] = [] + raw_results: RAW_RESULT = [] + for result in results: + if isinstance(result, dict): + raw_results.append(result) + else: + async_generators.append(result) + + entities = await self._register_resource_raw( + resource_config, raw_results, user_agent_type + ) + + for generator 
in async_generators: + try: + async for items in generator: + entities.extend( + await self._register_resource_raw( + resource_config, items, user_agent_type + ) + ) + except* OceanAbortException as error: + errors.append(error) + + logger.info( + f"Finished registering change for {len(results)} raw results for kind: {resource_config.kind}. {len(entities)} entities were affected" + ) + return entities, errors + + async def register_raw( + self, + kind: str, + results: list[dict[Any, Any]], + user_agent_type: UserAgentType, + ) -> list[Entity]: + """Register raw entities of a specific kind. + + This method registers raw entities of a specific kind into Port. + + Args: + kind (str): The kind of raw entities being registered. + results (list[dict[Any, Any]]): The raw entity results to be registered. + user_agent_type (UserAgentType): The type of user agent. + + Returns: + list[Entity]: A list of registered entities. + """ + logger.info(f"Registering state for {kind}") + config = await self.port_app_config_handler.get_port_app_config() + resource_mappings = [ + resource for resource in config.resources if resource.kind == kind + ] + + return await asyncio.gather( + *( + self._register_resource_raw(resource, results, user_agent_type) + for resource in resource_mappings + ) + ) + + async def unregister_raw( + self, + kind: str, + results: list[dict[Any, Any]], + user_agent_type: UserAgentType, + ) -> list[Entity]: + """Unregister raw entities of a specific kind. + + This method unregisters raw entities of a specific kind from Port. + + Args: + kind (str): The kind of raw entities being unregistered. + results (list[dict[Any, Any]]): The raw entity results to be unregistered. + user_agent_type (UserAgentType): The type of user agent. + + Returns: + list[Entity]: A list of unregistered entities. 
+ """ + logger.info(f"Registering state for {kind}") + config = await self.port_app_config_handler.get_port_app_config() + resource_mappings = [ + resource for resource in config.resources if resource.kind == kind + ] + + return await asyncio.gather( + *( + self._unregister_resource_raw(resource, results, user_agent_type) + for resource in resource_mappings + ) + ) + + async def update_raw_diff( + self, + kind: str, + raw_desired_state: RawEntityDiff, + user_agent_type: UserAgentType, + ) -> None: + """Update the difference in state for raw entities of a specific kind. + + This method updates the difference in state for raw entities of a specific kind. + + Args: + kind (str): The kind of raw entities being updated. + raw_desired_state (RawEntityDiff): The desired state difference of raw entities. + user_agent_type (UserAgentType): The type of user agent. + """ + logger.info(f"Updating state for {kind}") + config = await self.port_app_config_handler.get_port_app_config() + resource_mappings = [ + resource for resource in config.resources if resource.kind == kind + ] + + with logger.contextualize(kind=kind): + logger.info(f"Found {len(resource_mappings)} resources for {kind}") + + objects_diff = await self._calculate_raw( + [(mapping, raw_desired_state) for mapping in resource_mappings] + ) + + entities_before, entities_after = zip_and_sum( + ( + (entities_change["before"], entities_change["after"]) + for entities_change in objects_diff + ) + ) + + await self.entities_state_applier.apply_diff( + {"before": entities_before, "after": entities_after}, user_agent_type + ) + + async def sync_raw_all( + self, + _: dict[Any, Any] | None = None, + trigger_type: TriggerType = "machine", + user_agent_type: UserAgentType = UserAgentType.exporter, + silent: bool = True, + ) -> None: + """Perform a full synchronization of raw entities. + + This method performs a full synchronization of raw entities, including registration, unregistration, + and state updates. 
+ + Args: + _ (dict[Any, Any] | None): Unused parameter. + trigger_type (TriggerType): The type of trigger for the synchronization. + user_agent_type (UserAgentType): The type of user agent. + silent (bool): Whether to raise exceptions or handle them silently. + """ + logger.info("Resync was triggered") + async with event_context( + EventType.RESYNC, + trigger_type=trigger_type, + ): + app_config = await self.port_app_config_handler.get_port_app_config() + + entities_at_port = await ocean.port_client.search_entities(user_agent_type) + + creation_results: list[tuple[list[Entity], list[Exception]]] = [] + + try: + for resource in app_config.resources: + # create resource context per resource kind, so resync method could have access to the resource + # config as we might have multiple resources in the same event + async with resource_context(resource): + task = asyncio.get_event_loop().create_task( + self._register_in_batches(resource, user_agent_type) + ) + + event.on_abort(lambda: task.cancel()) + + creation_results.append(await task) + except asyncio.CancelledError as e: + logger.warning("Resync aborted successfully") + else: + flat_created_entities, errors = zip_and_sum(creation_results) or [ + [], + [], + ] + + if errors: + message = f"Resync failed with {len(errors)}. Skipping delete phase due to incomplete state" + error_group = ExceptionGroup( + f"Resync failed with {len(errors)}. 
Skipping delete phase due to incomplete state", + errors, + ) + if not silent: + raise error_group + + logger.error(message, exc_info=error_group) + else: + await self.entities_state_applier.delete_diff( + {"before": entities_at_port, "after": flat_created_entities}, + user_agent_type, + ) diff --git a/port_ocean/core/integrations/mixins/utils.py b/port_ocean/core/integrations/mixins/utils.py index af58b8b33e..91a0623956 100644 --- a/port_ocean/core/integrations/mixins/utils.py +++ b/port_ocean/core/integrations/mixins/utils.py @@ -6,9 +6,15 @@ from port_ocean.core.ocean_types import ( ASYNC_GENERATOR_RESYNC_TYPE, RAW_RESULT, + RESYNC_EVENT_LISTENER, + RESYNC_RESULT, ) from port_ocean.core.utils import validate_result -from port_ocean.exceptions.core import RawObjectValidationException, OceanAbortException +from port_ocean.exceptions.core import ( + RawObjectValidationException, + OceanAbortException, + KindNotImplementedException, +) @contextmanager @@ -53,3 +59,16 @@ async def resync_generator_wrapper( raise ExceptionGroup( "At least one of the resync generator iterations failed", errors ) + + +def is_resource_supported( + kind: str, resync_event_mapping: dict[str | None, list[RESYNC_EVENT_LISTENER]] +) -> bool: + return bool(resync_event_mapping[kind] or resync_event_mapping[None]) + + +def unsupported_kind_response( + kind: str, available_resync_kinds: list[str] +) -> tuple[RESYNC_RESULT, list[Exception]]: + logger.error(f"Kind {kind} is not supported in this integration") + return [], [KindNotImplementedException(kind, available_resync_kinds)] diff --git a/port_ocean/exceptions/core.py b/port_ocean/exceptions/core.py index bbae6a4e83..7e5995adfb 100644 --- a/port_ocean/exceptions/core.py +++ b/port_ocean/exceptions/core.py @@ -5,6 +5,12 @@ class OceanAbortException(BaseOceanException): pass +class KindNotImplementedException(OceanAbortException): + def __init__(self, kind: str, available_kinds: list[str]): + base_message = f"Kind {kind} is not implemented." 
def get_integration_version() -> str:
    """Return the integration's version as declared in ./pyproject.toml.

    Reads ``[tool.poetry].version`` from the pyproject.toml in the current
    working directory (the integration's root when running an integration).

    Returns:
        str: The declared version, or an empty string when the file is
        missing, malformed, or the version key is absent — callers treat the
        version as best-effort metadata and must not crash without it.
    """
    try:
        with open("./pyproject.toml", "rb") as toml_file:
            pyproject_data = tomli.load(toml_file)
        return pyproject_data["tool"]["poetry"]["version"]
    except (FileNotFoundError, KeyError, tomli.TOMLDecodeError):
        # A broken or partial pyproject.toml degrades to "no version" rather
        # than failing integration startup.
        return ""