From efaf21d571262a58c0c3e624c523213be4310c43 Mon Sep 17 00:00:00 2001 From: Harshal Sheth Date: Thu, 30 Nov 2023 03:51:44 -0500 Subject: [PATCH 01/23] docs(ingest/looker): mark platform instance as a supported capability (#9347) --- .../src/datahub/ingestion/source/looker/looker_config.py | 7 +++++-- .../src/datahub/ingestion/source/looker/looker_source.py | 2 +- .../src/datahub/ingestion/source/looker/lookml_source.py | 2 +- 3 files changed, 7 insertions(+), 4 deletions(-) diff --git a/metadata-ingestion/src/datahub/ingestion/source/looker/looker_config.py b/metadata-ingestion/src/datahub/ingestion/source/looker/looker_config.py index e6ddea9a30489e..514f22b4f21580 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/looker/looker_config.py +++ b/metadata-ingestion/src/datahub/ingestion/source/looker/looker_config.py @@ -9,7 +9,10 @@ from datahub.configuration import ConfigModel from datahub.configuration.common import AllowDenyPattern, ConfigurationError -from datahub.configuration.source_common import DatasetSourceConfigMixin, EnvConfigMixin +from datahub.configuration.source_common import ( + EnvConfigMixin, + PlatformInstanceConfigMixin, +) from datahub.configuration.validate_field_removal import pydantic_removed_field from datahub.ingestion.source.looker.looker_lib_wrapper import LookerAPIConfig from datahub.ingestion.source.state.stale_entity_removal_handler import ( @@ -98,7 +101,7 @@ class LookerViewNamingPattern(NamingPattern): ] -class LookerCommonConfig(DatasetSourceConfigMixin): +class LookerCommonConfig(EnvConfigMixin, PlatformInstanceConfigMixin): explore_naming_pattern: LookerNamingPattern = pydantic.Field( description=f"Pattern for providing dataset names to explores. {LookerNamingPattern.allowed_docstring()}", default=LookerNamingPattern(pattern="{model}.explore.{name}"), diff --git a/metadata-ingestion/src/datahub/ingestion/source/looker/looker_source.py b/metadata-ingestion/src/datahub/ingestion/source/looker/looker_source.py index 4a98e8874bca0d..7e8fbfde120420 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/looker/looker_source.py +++ b/metadata-ingestion/src/datahub/ingestion/source/looker/looker_source.py @@ -99,7 +99,7 @@ @support_status(SupportStatus.CERTIFIED) @config_class(LookerDashboardSourceConfig) @capability(SourceCapability.DESCRIPTIONS, "Enabled by default") -@capability(SourceCapability.PLATFORM_INSTANCE, "Not supported", supported=False) +@capability(SourceCapability.PLATFORM_INSTANCE, "Use the `platform_instance` field") @capability( SourceCapability.OWNERSHIP, "Enabled by default, configured using `extract_owners`" ) diff --git a/metadata-ingestion/src/datahub/ingestion/source/looker/lookml_source.py b/metadata-ingestion/src/datahub/ingestion/source/looker/lookml_source.py index 2bd469b3f9bcdd..4e91d17feaa9f0 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/looker/lookml_source.py +++ b/metadata-ingestion/src/datahub/ingestion/source/looker/lookml_source.py @@ -1455,7 +1455,7 @@ class LookerManifest: @support_status(SupportStatus.CERTIFIED) @capability( SourceCapability.PLATFORM_INSTANCE, - "Supported using the `connection_to_platform_map`", + "Use the `platform_instance` and `connection_to_platform_map` fields", ) @capability(SourceCapability.LINEAGE_COARSE, "Supported by default") @capability( From 65d5034a80d60f85f57a5157b730eda9d83c5516 Mon Sep 17 00:00:00 2001 From: Salman-Apptware <101426513+Salman-Apptware@users.noreply.github.com> Date: Thu, 30 Nov 2023 22:50:08 +0530 Subject: [PATCH 02/23] fix(): Address HIGH 
vulnerability with Axios (#9353) --- datahub-web-react/package.json | 2 +- datahub-web-react/yarn.lock | 116 ++++++++++++++++----------------- 2 files changed, 58 insertions(+), 60 deletions(-) diff --git a/datahub-web-react/package.json b/datahub-web-react/package.json index 40bcad19284d9c..22c88f9647dc27 100644 --- a/datahub-web-react/package.json +++ b/datahub-web-react/package.json @@ -80,7 +80,7 @@ "reactour": "1.18.7", "remirror": "^2.0.23", "sinon": "^11.1.1", - "start-server-and-test": "1.12.2", + "start-server-and-test": "^2.0.3", "styled-components": "^5.2.1", "turndown-plugin-gfm": "^1.0.2", "typescript": "^4.8.4", diff --git a/datahub-web-react/yarn.lock b/datahub-web-react/yarn.lock index b755281d176970..d33299a79b13a1 100644 --- a/datahub-web-react/yarn.lock +++ b/datahub-web-react/yarn.lock @@ -3760,14 +3760,14 @@ resolved "https://registry.yarnpkg.com/@seznam/compose-react-refs/-/compose-react-refs-1.0.6.tgz#6ec4e70bdd6e32f8e70b4100f27267cf306bd8df" integrity sha512-izzOXQfeQLonzrIQb8u6LQ8dk+ymz3WXTIXjvOlTXHq6sbzROg3NWU+9TTAOpEoK9Bth24/6F/XrfHJ5yR5n6Q== -"@sideway/address@^4.1.0": - version "4.1.2" - resolved "https://registry.npmjs.org/@sideway/address/-/address-4.1.2.tgz#811b84333a335739d3969cfc434736268170cad1" - integrity sha512-idTz8ibqWFrPU8kMirL0CoPH/A29XOzzAzpyN3zQ4kAWnzmNfFmRaoMNN6VI8ske5M73HZyhIaW4OuSFIdM4oA== +"@sideway/address@^4.1.3": + version "4.1.4" + resolved "https://registry.yarnpkg.com/@sideway/address/-/address-4.1.4.tgz#03dccebc6ea47fdc226f7d3d1ad512955d4783f0" + integrity sha512-7vwq+rOHVWjyXxVlR76Agnvhy8I9rpzjosTESvmhNeXOXdZZB15Fl+TI9x1SiHZH5Jv2wTGduSxFDIaq0m3DUw== dependencies: "@hapi/hoek" "^9.0.0" -"@sideway/formula@^3.0.0": +"@sideway/formula@^3.0.1": version "3.0.1" resolved "https://registry.yarnpkg.com/@sideway/formula/-/formula-3.0.1.tgz#80fcbcbaf7ce031e0ef2dd29b1bfc7c3f583611f" integrity sha512-/poHZJJVjx3L+zVD6g9KgHfYnb443oi7wLu/XKojDviHy6HOEOA6z1Trk5aR1dGcmPenJEgb2sK2I80LeS3MIg== @@ -5712,12 +5712,14 @@ axe-core@^4.6.2: resolved "https://registry.yarnpkg.com/axe-core/-/axe-core-4.7.2.tgz#040a7342b20765cb18bb50b628394c21bccc17a0" integrity sha512-zIURGIS1E1Q4pcrMjp+nnEh+16G56eG/MUllJH8yEvw7asDo7Ac9uhC9KIH5jzpITueEZolfYglnCGIuSBz39g== -axios@^0.21.1: - version "0.21.4" - resolved "https://registry.yarnpkg.com/axios/-/axios-0.21.4.tgz#c67b90dc0568e5c1cf2b0b858c43ba28e2eda575" - integrity sha512-ut5vewkiu8jjGBdqpM44XxjuCjq9LAKeHVmoVfHVzy8eHgxxq8SbAVQNovDA8mVi05kP0Ea/n/UzcSHcTJQfNg== +axios@^1.6.1: + version "1.6.2" + resolved "https://registry.yarnpkg.com/axios/-/axios-1.6.2.tgz#de67d42c755b571d3e698df1b6504cde9b0ee9f2" + integrity sha512-7i24Ri4pmDRfJTR7LDBhsOTtcm+9kjX5WiY1X3wIisx6G9So3pfMkEiU7emUBe46oceVImccTEM3k6C5dbVW8A== dependencies: - follow-redirects "^1.14.0" + follow-redirects "^1.15.0" + form-data "^4.0.0" + proxy-from-env "^1.1.0" axobject-query@^3.1.1: version "3.2.1" @@ -7301,7 +7303,7 @@ debug@2.6.9, debug@^2.2.0, debug@^2.3.3, debug@^2.6.0: dependencies: ms "2.0.0" -debug@4, debug@^4.0.0, debug@^4.1.0, debug@^4.1.1, debug@^4.3.1, debug@^4.3.2, debug@^4.3.4: +debug@4, debug@4.3.4, debug@^4.0.0, debug@^4.1.0, debug@^4.1.1, debug@^4.3.1, debug@^4.3.2, debug@^4.3.4: version "4.3.4" resolved "https://registry.yarnpkg.com/debug/-/debug-4.3.4.tgz#1319f6579357f2338d3337d2cdd4914bb5dcc865" integrity sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ== @@ -7315,13 +7317,6 @@ debug@4.1.1: dependencies: ms "^2.1.1" -debug@4.3.1: - version "4.3.1" - resolved 
"https://registry.yarnpkg.com/debug/-/debug-4.3.1.tgz#f0d229c505e0c6d8c49ac553d1b13dc183f6b2ee" - integrity sha512-doEwdvm4PCeK4K3RQN2ZC2BYUBaxwLARCqZmMjtF8a51J2Rb0xpVloFRnCODwqjpwnAoao4pelN8l3RJdv3gRQ== - dependencies: - ms "2.1.2" - debug@^3.2.6, debug@^3.2.7: version "3.2.7" resolved "https://registry.npmjs.org/debug/-/debug-3.2.7.tgz#72580b7e9145fb39b6676f9c5e5fb100b934179a" @@ -8232,22 +8227,7 @@ events@^3.2.0: resolved "https://registry.yarnpkg.com/events/-/events-3.3.0.tgz#31a95ad0a924e2d2c419a813aeb2c4e878ea7400" integrity sha512-mQw+2fkQbALzQ7V0MY0IqdnXNOeTtP4r0lN9z7AAawCXgqea7bDii20AYrIBrFd/Hx0M2Ocz6S111CaFkUcb0Q== -execa@5.0.0: - version "5.0.0" - resolved "https://registry.npmjs.org/execa/-/execa-5.0.0.tgz#4029b0007998a841fbd1032e5f4de86a3c1e3376" - integrity sha512-ov6w/2LCiuyO4RLYGdpFGjkcs0wMTgGE8PrkTHikeUy5iJekXyPIKUjifk5CsE0pt7sMCrMZ3YNqoCj6idQOnQ== - dependencies: - cross-spawn "^7.0.3" - get-stream "^6.0.0" - human-signals "^2.1.0" - is-stream "^2.0.0" - merge-stream "^2.0.0" - npm-run-path "^4.0.1" - onetime "^5.1.2" - signal-exit "^3.0.3" - strip-final-newline "^2.0.0" - -execa@^5.0.0: +execa@5.1.1, execa@^5.0.0: version "5.1.1" resolved "https://registry.yarnpkg.com/execa/-/execa-5.1.1.tgz#f80ad9cbf4298f7bd1d4c9555c21e93741c411dd" integrity sha512-8uSpZZocAZRBAPIEINJj3Lo9HyGitllczc27Eh5YYojjMFMn8yHMDMaUHE2Jqfq05D/wucwI4JGURyXt1vchyg== @@ -8661,11 +8641,16 @@ focus-outline-manager@^1.0.2: resolved "https://registry.yarnpkg.com/focus-outline-manager/-/focus-outline-manager-1.0.2.tgz#7bf3658865341fb6b08d042a037b9d2868b119b5" integrity sha512-bHWEmjLsTjGP9gVs7P3Hyl+oY5NlMW8aTSPdTJ+X2GKt6glDctt9fUCLbRV+d/l8NDC40+FxMjp9WlTQXaQALw== -follow-redirects@^1.0.0, follow-redirects@^1.14.0: +follow-redirects@^1.0.0: version "1.15.0" resolved "https://registry.yarnpkg.com/follow-redirects/-/follow-redirects-1.15.0.tgz#06441868281c86d0dda4ad8bdaead2d02dca89d4" integrity sha512-aExlJShTV4qOUOL7yF1U5tvLCB0xQuudbf6toyYA0E/acBNw71mvjFTnLaRp50aQaYocMR0a/RMMBIHeZnGyjQ== +follow-redirects@^1.15.0: + version "1.15.3" + resolved "https://registry.yarnpkg.com/follow-redirects/-/follow-redirects-1.15.3.tgz#fe2f3ef2690afce7e82ed0b44db08165b207123a" + integrity sha512-1VzOtuEM8pC9SFU1E+8KfTjZyMztRsgEfwQl44z8A25uy13jSzTj6dyK2Df52iV0vgHCfBwLhDWevLn95w5v6Q== + for-in@^1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/for-in/-/for-in-1.0.2.tgz#81068d295a8142ec0ac726c6e2200c30fb6d5e80" @@ -8695,7 +8680,7 @@ fork-ts-checker-webpack-plugin@^6.5.0: semver "^7.3.2" tapable "^1.0.0" -form-data@4.0.0: +form-data@4.0.0, form-data@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/form-data/-/form-data-4.0.0.tgz#93919daeaf361ee529584b9b31664dc12c9fa452" integrity sha512-ETEklSGi5t0QMZuiXoA/Q6vcnxcLQP5vdugSpuAyi6SVGi2clPPp+xgEhuMaHC+zGgn31Kd235W35f7Hykkaww== @@ -10711,15 +10696,15 @@ jiti@^1.18.2: resolved "https://registry.yarnpkg.com/jiti/-/jiti-1.18.2.tgz#80c3ef3d486ebf2450d9335122b32d121f2a83cd" integrity sha512-QAdOptna2NYiSSpv0O/BwoHBSmz4YhpzJHyi+fnMRTXFjp7B8i/YG5Z8IfusxB1ufjcD2Sre1F3R+nX3fvy7gg== -joi@^17.3.0: - version "17.4.1" - resolved "https://registry.npmjs.org/joi/-/joi-17.4.1.tgz#15d2f23c8cbe4d1baded2dd190c58f8dbe11cca0" - integrity sha512-gDPOwQ5sr+BUxXuPDGrC1pSNcVR/yGGcTI0aCnjYxZEa3za60K/iCQ+OFIkEHWZGVCUcUlXlFKvMmrlmxrG6UQ== +joi@^17.11.0: + version "17.11.0" + resolved "https://registry.yarnpkg.com/joi/-/joi-17.11.0.tgz#aa9da753578ec7720e6f0ca2c7046996ed04fc1a" + integrity sha512-NgB+lZLNoqISVy1rZocE9PZI36bL/77ie924Ri43yEvi9GUUMPeyVIr8KdFTMUlby1p0PBYMk9spIxEUQYqrJQ== 
dependencies: "@hapi/hoek" "^9.0.0" "@hapi/topo" "^5.0.0" - "@sideway/address" "^4.1.0" - "@sideway/formula" "^3.0.0" + "@sideway/address" "^4.1.3" + "@sideway/formula" "^3.0.1" "@sideway/pinpoint" "^2.0.0" js-cookie@^2.2.1: @@ -11833,7 +11818,7 @@ minimatch@3.0.4, minimatch@3.0.5, minimatch@^3.0.4, minimatch@^3.0.5, minimatch@ dependencies: brace-expansion "^1.1.7" -minimist@^1.2.0, minimist@^1.2.5, minimist@^1.2.6: +minimist@^1.2.0, minimist@^1.2.5, minimist@^1.2.6, minimist@^1.2.8: version "1.2.8" resolved "https://registry.yarnpkg.com/minimist/-/minimist-1.2.8.tgz#c1a464e7693302e082a075cee0c057741ac4772c" integrity sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA== @@ -13486,6 +13471,11 @@ proxy-addr@~2.0.7: forwarded "0.2.0" ipaddr.js "1.9.1" +proxy-from-env@^1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/proxy-from-env/-/proxy-from-env-1.1.0.tgz#e102f16ca355424865755d2c9e8ea4f24d58c3e2" + integrity sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg== + prr@~1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/prr/-/prr-1.0.1.tgz#d3fc114ba06995a45ec6893f484ceb1d78f5f476" @@ -14883,13 +14873,20 @@ run-parallel@^1.1.9: dependencies: queue-microtask "^1.2.2" -rxjs@^6.3.3, rxjs@^6.6.0, rxjs@^6.6.3: +rxjs@^6.3.3, rxjs@^6.6.0: version "6.6.7" resolved "https://registry.yarnpkg.com/rxjs/-/rxjs-6.6.7.tgz#90ac018acabf491bf65044235d5863c4dab804c9" integrity sha512-hTdwr+7yYNIT5n4AMYp85KA6yw2Va0FLa3Rguvbpa4W3I5xynaBZo41cM3XM+4Q6fRMj3sBYIR1VAmZMXYJvRQ== dependencies: tslib "^1.9.0" +rxjs@^7.8.1: + version "7.8.1" + resolved "https://registry.yarnpkg.com/rxjs/-/rxjs-7.8.1.tgz#6f6f3d99ea8044291efd92e7c7fcf562c4057543" + integrity sha512-AA3TVj+0A2iuIoQkWEK/tqFjBq2j+6PO6Y0zJcvzLAFhEFIO3HL0vls9hWLncZbAAbK0mar7oZ4V079I/qPMxg== + dependencies: + tslib "^2.1.0" + safari-14-idb-fix@^1.0.6: version "1.0.6" resolved "https://registry.yarnpkg.com/safari-14-idb-fix/-/safari-14-idb-fix-1.0.6.tgz#cbaabc33a4500c44b5c432d6c525b0ed9b68bb65" @@ -15495,18 +15492,19 @@ stacktrace-js@^2.0.2: stack-generator "^2.0.5" stacktrace-gps "^3.0.4" -start-server-and-test@1.12.2: - version "1.12.2" - resolved "https://registry.npmjs.org/start-server-and-test/-/start-server-and-test-1.12.2.tgz#13afe6f22d7347e0fd47a739cdd085786fced14b" - integrity sha512-rjJF8N/8XVukEYR44Ehm8LAZIDjWCQKXX54W8UQ8pXz3yDKPCdqTqJy7VYnCAknPw65cmLfPxz8M2+K/zCAvVQ== +start-server-and-test@^2.0.3: + version "2.0.3" + resolved "https://registry.yarnpkg.com/start-server-and-test/-/start-server-and-test-2.0.3.tgz#15c53c85e23cba7698b498b8a2598cab95f3f802" + integrity sha512-QsVObjfjFZKJE6CS6bSKNwWZCKBG6975/jKRPPGFfFh+yOQglSeGXiNWjzgQNXdphcBI9nXbyso9tPfX4YAUhg== dependencies: + arg "^5.0.2" bluebird "3.7.2" check-more-types "2.24.0" - debug "4.3.1" - execa "5.0.0" + debug "4.3.4" + execa "5.1.1" lazy-ass "1.6.0" ps-tree "1.2.0" - wait-on "5.3.0" + wait-on "7.2.0" state-local@^1.0.6: version "1.0.7" @@ -16739,16 +16737,16 @@ w3c-xmlserializer@^2.0.0: dependencies: xml-name-validator "^3.0.0" -wait-on@5.3.0: - version "5.3.0" - resolved "https://registry.npmjs.org/wait-on/-/wait-on-5.3.0.tgz#584e17d4b3fe7b46ac2b9f8e5e102c005c2776c7" - integrity sha512-DwrHrnTK+/0QFaB9a8Ol5Lna3k7WvUR4jzSKmz0YaPBpuN2sACyiPVKVfj6ejnjcajAcvn3wlbTyMIn9AZouOg== +wait-on@7.2.0: + version "7.2.0" + resolved "https://registry.yarnpkg.com/wait-on/-/wait-on-7.2.0.tgz#d76b20ed3fc1e2bebc051fae5c1ff93be7892928" + integrity 
sha512-wCQcHkRazgjG5XoAq9jbTMLpNIjoSlZslrJ2+N9MxDsGEv1HnFoVjOCexL0ESva7Y9cu350j+DWADdk54s4AFQ== dependencies: - axios "^0.21.1" - joi "^17.3.0" + axios "^1.6.1" + joi "^17.11.0" lodash "^4.17.21" - minimist "^1.2.5" - rxjs "^6.6.3" + minimist "^1.2.8" + rxjs "^7.8.1" walker@^1.0.7: version "1.0.7" From ae1169d6d5831751b6d26d08052472d4adfdbf43 Mon Sep 17 00:00:00 2001 From: gaurav2733 <77378510+gaurav2733@users.noreply.github.com> Date: Thu, 30 Nov 2023 22:53:28 +0530 Subject: [PATCH 03/23] fix(ui): show formatted total result count in Search (#9356) --- datahub-web-react/src/app/search/SearchResults.tsx | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/datahub-web-react/src/app/search/SearchResults.tsx b/datahub-web-react/src/app/search/SearchResults.tsx index b93e835970196a..11bb4945887533 100644 --- a/datahub-web-react/src/app/search/SearchResults.tsx +++ b/datahub-web-react/src/app/search/SearchResults.tsx @@ -28,6 +28,7 @@ import SearchSortSelect from './sorting/SearchSortSelect'; import { combineSiblingsInSearchResults } from './utils/combineSiblingsInSearchResults'; import SearchQuerySuggester from './suggestions/SearchQuerySugggester'; import { ANTD_GRAY_V2 } from '../entity/shared/constants'; +import { formatNumberWithoutAbbreviation } from '../shared/formatNumber'; const SearchResultsWrapper = styled.div<{ v2Styles: boolean }>` display: flex; @@ -210,7 +211,13 @@ export const SearchResults = ({ {lastResultIndex > 0 ? (page - 1) * pageSize + 1 : 0} - {lastResultIndex} {' '} - of {totalResults} results + of{' '} + + {totalResults >= 10000 + ? `${formatNumberWithoutAbbreviation(10000)}+` + : formatNumberWithoutAbbreviation(totalResults)} + {' '} + results From a7dc9c9d2292898d9668a3e39b0db42837397f94 Mon Sep 17 00:00:00 2001 From: Harshal Sheth Date: Thu, 30 Nov 2023 18:11:36 -0500 Subject: [PATCH 04/23] feat(sdk): autogenerate urn types (#9257) --- docs-website/sphinx/apidocs/urns.rst | 7 + docs-website/sphinx/conf.py | 4 + docs-website/sphinx/index.rst | 1 + docs-website/sphinx/requirements.txt | 2 +- docs/how/updating-datahub.md | 52 ++- .../dataset_add_column_documentation.py | 14 +- .../library/dataset_add_column_tag.py | 14 +- .../library/dataset_add_column_term.py | 14 +- .../examples/library/upsert_group.py | 8 +- metadata-ingestion/scripts/avro_codegen.py | 407 +++++++++++++++++- .../scripts/custom_package_codegen.py | 1 + .../dataprocess/dataprocess_instance.py | 2 +- .../datahub/ingestion/source/csv_enricher.py | 5 +- .../source/metadata/business_glossary.py | 2 +- .../src/datahub/ingestion/source/tableau.py | 4 +- .../utilities/_custom_package_loader.py | 5 + .../src/datahub/utilities/docs_build.py | 9 + .../src/datahub/utilities/sqlglot_lineage.py | 14 +- .../src/datahub/utilities/urn_encoder.py | 4 + .../src/datahub/utilities/urns/_urn_base.py | 234 ++++++++++ .../datahub/utilities/urns/corp_group_urn.py | 41 +- .../datahub/utilities/urns/corpuser_urn.py | 41 +- .../datahub/utilities/urns/data_flow_urn.py | 89 +--- .../datahub/utilities/urns/data_job_urn.py | 52 +-- .../utilities/urns/data_platform_urn.py | 35 +- .../urns/data_process_instance_urn.py | 47 +- .../src/datahub/utilities/urns/dataset_urn.py | 113 +---- .../src/datahub/utilities/urns/domain_urn.py | 41 +- .../src/datahub/utilities/urns/error.py | 3 +- .../src/datahub/utilities/urns/field_paths.py | 15 + .../datahub/utilities/urns/notebook_urn.py | 47 +- .../src/datahub/utilities/urns/tag_urn.py | 41 +- .../src/datahub/utilities/urns/urn.py | 163 +------ 
.../src/datahub/utilities/urns/urn_iter.py | 10 +- .../state/test_checkpoint.py | 8 +- .../stateful_ingestion/test_kafka_state.py | 2 +- metadata-ingestion/tests/unit/test_urn.py | 45 -- .../unit/{ => urns}/test_corp_group_urn.py | 5 +- .../unit/{ => urns}/test_corpuser_urn.py | 5 +- .../unit/{ => urns}/test_data_flow_urn.py | 10 +- .../unit/{ => urns}/test_data_job_urn.py | 5 +- .../test_data_process_instance_urn.py | 9 +- .../tests/unit/{ => urns}/test_dataset_urn.py | 15 +- .../tests/unit/{ => urns}/test_domain_urn.py | 5 +- .../unit/{ => urns}/test_notebook_urn.py | 5 +- .../tests/unit/{ => urns}/test_tag_urn.py | 5 +- .../tests/unit/urns/test_urn.py | 56 +++ .../src/main/resources/entity-registry.yml | 4 +- 48 files changed, 856 insertions(+), 864 deletions(-) create mode 100644 docs-website/sphinx/apidocs/urns.rst create mode 100644 metadata-ingestion/src/datahub/utilities/docs_build.py create mode 100644 metadata-ingestion/src/datahub/utilities/urns/_urn_base.py create mode 100644 metadata-ingestion/src/datahub/utilities/urns/field_paths.py delete mode 100644 metadata-ingestion/tests/unit/test_urn.py rename metadata-ingestion/tests/unit/{ => urns}/test_corp_group_urn.py (87%) rename metadata-ingestion/tests/unit/{ => urns}/test_corpuser_urn.py (88%) rename metadata-ingestion/tests/unit/{ => urns}/test_data_flow_urn.py (77%) rename metadata-ingestion/tests/unit/{ => urns}/test_data_job_urn.py (90%) rename metadata-ingestion/tests/unit/{ => urns}/test_data_process_instance_urn.py (90%) rename metadata-ingestion/tests/unit/{ => urns}/test_dataset_urn.py (81%) rename metadata-ingestion/tests/unit/{ => urns}/test_domain_urn.py (87%) rename metadata-ingestion/tests/unit/{ => urns}/test_notebook_urn.py (86%) rename metadata-ingestion/tests/unit/{ => urns}/test_tag_urn.py (87%) create mode 100644 metadata-ingestion/tests/unit/urns/test_urn.py diff --git a/docs-website/sphinx/apidocs/urns.rst b/docs-website/sphinx/apidocs/urns.rst new file mode 100644 index 00000000000000..2bd70deb22c7e6 --- /dev/null +++ b/docs-website/sphinx/apidocs/urns.rst @@ -0,0 +1,7 @@ +URNs +====== + +.. automodule:: datahub.metadata.urns + :exclude-members: LI_DOMAIN, URN_PREFIX, url_encode, validate, get_type, get_entity_id, get_entity_id_as_string, get_domain, underlying_key_aspect_type + :member-order: alphabetical + :inherited-members: diff --git a/docs-website/sphinx/conf.py b/docs-website/sphinx/conf.py index 3f118aadeea819..49cd20d5ef44db 100644 --- a/docs-website/sphinx/conf.py +++ b/docs-website/sphinx/conf.py @@ -3,6 +3,10 @@ # For the full list of built-in configuration values, see the documentation: # https://www.sphinx-doc.org/en/master/usage/configuration.html +# See https://stackoverflow.com/a/65147676 +import builtins + +builtins.__sphinx_build__ = True # -- Project information ----------------------------------------------------- # https://www.sphinx-doc.org/en/master/usage/configuration.html#project-information diff --git a/docs-website/sphinx/index.rst b/docs-website/sphinx/index.rst index fe11648dff555a..18d92f4053b94b 100644 --- a/docs-website/sphinx/index.rst +++ b/docs-website/sphinx/index.rst @@ -14,6 +14,7 @@ Welcome to DataHub Python SDK's documentation! 
apidocs/builder apidocs/clients apidocs/models + apidocs/urns Indices and tables diff --git a/docs-website/sphinx/requirements.txt b/docs-website/sphinx/requirements.txt index 94ddd40579f0e7..2e064330138d9c 100644 --- a/docs-website/sphinx/requirements.txt +++ b/docs-website/sphinx/requirements.txt @@ -1,4 +1,4 @@ --e ../../metadata-ingestion[datahub-rest,sql-parsing] +-e ../../metadata-ingestion[datahub-rest,sql-parser] beautifulsoup4==4.11.2 Sphinx==6.1.3 sphinx-click==4.4.0 diff --git a/docs/how/updating-datahub.md b/docs/how/updating-datahub.md index 3263a9f7c15fb3..dad05fd0153f24 100644 --- a/docs/how/updating-datahub.md +++ b/docs/how/updating-datahub.md @@ -9,6 +9,9 @@ This file documents any backwards-incompatible changes in DataHub and assists pe - Updating MySQL version for quickstarts to 8.2, may cause quickstart issues for existing instances. - #9244: The `redshift-legacy` and `redshift-legacy-usage` sources, which have been deprecated for >6 months, have been removed. The new `redshift` source is a superset of the functionality provided by those legacy sources. - `database_alias` config is no longer supported in SQL sources namely - Redshift, MySQL, Oracle, Postgres, Trino, Presto-on-hive. The config will automatically be ignored if it's present in your recipe. It has been deprecated since v0.9.6. +- #9257: The Python SDK urn types are now autogenerated. The new classes are largely backwards compatible with the previous, manually written classes, but many older methods are now deprecated in favor of a more uniform interface. The only breaking change is that the signature for the director constructor e.g. `TagUrn("tag", ["tag_name"])` is no longer supported, and the simpler `TagUrn("tag_name")` should be used instead. + The canonical place to import the urn classes from is `datahub.metadata.urns.*`. Other import paths, like `datahub.utilities.urns.corpuser_urn.CorpuserUrn` are retained for backwards compatibility, but are considered deprecated. + ### Potential Downtime ### Deprecations @@ -23,18 +26,19 @@ This file documents any backwards-incompatible changes in DataHub and assists pe - #9044 - GraphQL APIs for adding ownership now expect either an `ownershipTypeUrn` referencing a customer ownership type or a (deprecated) `type`. Where before adding an ownership without a concrete type was allowed, this is no longer the case. For simplicity you can use the `type` parameter which will get translated to a custom ownership type internally if one exists for the type being added. - #9010 - In Redshift source's config `incremental_lineage` is set default to off. - #8810 - Removed support for SQLAlchemy 1.3.x. Only SQLAlchemy 1.4.x is supported now. -- #8942 - Removed `urn:li:corpuser:datahub` owner for the `Measure`, `Dimension` and `Temporal` tags emitted +- #8942 - Removed `urn:li:corpuser:datahub` owner for the `Measure`, `Dimension` and `Temporal` tags emitted by Looker and LookML source connectors. - #8853 - The Airflow plugin no longer supports Airflow 2.0.x or Python 3.7. See the docs for more details. - #8853 - Introduced the Airflow plugin v2. If you're using Airflow 2.3+, the v2 plugin will be enabled by default, and so you'll need to switch your requirements to include `pip install 'acryl-datahub-airflow-plugin[plugin-v2]'`. To continue using the v1 plugin, set the `DATAHUB_AIRFLOW_PLUGIN_USE_V1_PLUGIN` environment variable to `true`. - #8943 - The Unity Catalog ingestion source has a new option `include_metastore`, which will cause all urns to be changed when disabled. 
-This is currently enabled by default to preserve compatibility, but will be disabled by default and then removed in the future. -If stateful ingestion is enabled, simply setting `include_metastore: false` will perform all required cleanup. -Otherwise, we recommend soft deleting all databricks data via the DataHub CLI: -`datahub delete --platform databricks --soft` and then reingesting with `include_metastore: false`. + This is currently enabled by default to preserve compatibility, but will be disabled by default and then removed in the future. + If stateful ingestion is enabled, simply setting `include_metastore: false` will perform all required cleanup. + Otherwise, we recommend soft deleting all databricks data via the DataHub CLI: + `datahub delete --platform databricks --soft` and then reingesting with `include_metastore: false`. - #8846 - Changed enum values in resource filters used by policies. `RESOURCE_TYPE` became `TYPE` and `RESOURCE_URN` became `URN`. -Any existing policies using these filters (i.e. defined for particular `urns` or `types` such as `dataset`) need to be upgraded -manually, for example by retrieving their respective `dataHubPolicyInfo` aspect and changing part using filter i.e. + Any existing policies using these filters (i.e. defined for particular `urns` or `types` such as `dataset`) need to be upgraded + manually, for example by retrieving their respective `dataHubPolicyInfo` aspect and changing part using filter i.e. + ```yaml "resources": { "filter": { @@ -49,7 +53,9 @@ manually, for example by retrieving their respective `dataHubPolicyInfo` aspect ] } ``` + into + ```yaml "resources": { "filter": { @@ -64,22 +70,25 @@ into ] } ``` + for example, using `datahub put` command. Policies can be also removed and re-created via UI. + - #9077 - The BigQuery ingestion source by default sets `match_fully_qualified_names: true`. -This means that any `dataset_pattern` or `schema_pattern` specified will be matched on the fully -qualified dataset name, i.e. `.`. We attempt to support the old -pattern format by prepending `.*\\.` to dataset patterns lacking a period, so in most cases this -should not cause any issues. However, if you have a complex dataset pattern, we recommend you -manually convert it to the fully qualified format to avoid any potential issues. + This means that any `dataset_pattern` or `schema_pattern` specified will be matched on the fully + qualified dataset name, i.e. `.`. We attempt to support the old + pattern format by prepending `.*\\.` to dataset patterns lacking a period, so in most cases this + should not cause any issues. However, if you have a complex dataset pattern, we recommend you + manually convert it to the fully qualified format to avoid any potential issues. - #9110 - The Unity Catalog source will now generate urns based on `env` properly. If you have -been setting `env` in your recipe to something besides `PROD`, we will now generate urns -with that new env variable, invalidating your existing urns. + been setting `env` in your recipe to something besides `PROD`, we will now generate urns + with that new env variable, invalidating your existing urns. ### Potential Downtime ### Deprecations ### Other Notable Changes + - Session token configuration has changed, all previously created session tokens will be invalid and users will be prompted to log in. Expiration time has also been shortened which may result in more login prompts with the default settings. There should be no other interruption due to this change. 
@@ -88,13 +97,16 @@ with that new env variable, invalidating your existing urns. ### Breaking Changes ### Potential Downtime + - #8611 Search improvements requires reindexing indices. A `system-update` job will run which will set indices to read-only and create a backup/clone of each index. During the reindexing new components will be prevented from start-up until the reindex completes. The logs of this job will indicate a % complete per index. Depending on index sizes and infrastructure this process can take 5 minutes to hours however as a rough estimate 1 hour for every 2.3 million entities. ### Deprecations + - #8525: In LDAP ingestor, the `manager_pagination_enabled` changed to general `pagination_enabled` - MAE Events are no longer produced. MAE events have been deprecated for over a year. ### Other Notable Changes + - In this release we now enable you to create and delete pinned announcements on your DataHub homepage! If you have the “Manage Home Page Posts” platform privilege you’ll see a new section in settings called “Home Page Posts” where you can create and delete text posts and link posts that your users see on the home page. - The new search and browse experience, which was first made available in the previous release behind a feature flag, is now on by default. Check out our release notes for v0.10.5 to get more information and documentation on this new Browse experience. - In addition to the ranking changes mentioned above, this release includes changes to the highlighting of search entities to understand why they match your query. You can also sort your results alphabetically or by last updated times, in addition to relevance. In this release, we suggest a correction if your query has a typo in it. @@ -121,12 +133,13 @@ with that new env variable, invalidating your existing urns. This determines which Okta profile attribute is used for the corresponding DataHub user and thus may change what DataHub users are generated by the Okta source. And in a follow up `okta_profile_to_username_regex` has been set to `.*` which taken together with previous change brings the defaults in line with OIDC. - #8331: For all sql-based sources that support profiling, you can no longer specify -`profile_table_level_only` together with `include_field_xyz` config options to ingest -certain column-level metrics. Instead, set `profile_table_level_only` to `false` and -individually enable / disable desired field metrics. + `profile_table_level_only` together with `include_field_xyz` config options to ingest + certain column-level metrics. Instead, set `profile_table_level_only` to `false` and + individually enable / disable desired field metrics. - #8451: The `bigquery-beta` and `snowflake-beta` source aliases have been dropped. Use `bigquery` and `snowflake` as the source type instead. - #8472: Ingestion runs created with Pipeline.create will show up in the DataHub ingestion tab as CLI-based runs. To revert to the previous behavior of not showing these runs in DataHub, pass `no_default_report=True`. -- #8513: `snowflake` connector will use user's `email` attribute as is in urn. To revert to previous behavior disable `email_as_user_identifier` in recipe. +- #8513: `snowflake` connector will use user's `email` attribute as is in urn. To revert to previous behavior disable `email_as_user_identifier` in recipe. + ### Potential Downtime - BrowsePathsV2 upgrade will now be handled by the `system-update` job in non-blocking mode. 
This process generates data needed for the new search @@ -153,9 +166,11 @@ individually enable / disable desired field metrics. ### Potential Downtime ### Deprecations + - #8045: With the introduction of custom ownership types, the `Owner` aspect has been updated where the `type` field is deprecated in favor of a new field `typeUrn`. This latter field is an urn reference to the new OwnershipType entity. GraphQL endpoints have been updated to use the new field. For pre-existing ownership aspect records, DataHub now has logic to map the old field to the new field. ### Other notable Changes + - #8191: Updates GMS's health check endpoint to account for its dependency on external components. Notably, at this time, elasticsearch. This means that DataHub operators can now use GMS health status more reliably. ## 0.10.3 @@ -170,6 +185,7 @@ individually enable / disable desired field metrics. ### Potential Downtime ### Deprecations + - The signature of `Source.get_workunits()` is changed from `Iterable[WorkUnit]` to the more restrictive `Iterable[MetadataWorkUnit]`. - Legacy usage creation via the `UsageAggregation` aspect, `/usageStats?action=batchIngest` GMS endpoint, and `UsageStatsWorkUnit` metadata-ingestion class are all deprecated. diff --git a/metadata-ingestion/examples/library/dataset_add_column_documentation.py b/metadata-ingestion/examples/library/dataset_add_column_documentation.py index a6dbf58c09c813..bf871e2dcdb8e6 100644 --- a/metadata-ingestion/examples/library/dataset_add_column_documentation.py +++ b/metadata-ingestion/examples/library/dataset_add_column_documentation.py @@ -14,24 +14,12 @@ EditableSchemaMetadataClass, InstitutionalMemoryClass, ) +from datahub.utilities.urns.field_paths import get_simple_field_path_from_v2_field_path log = logging.getLogger(__name__) logging.basicConfig(level=logging.INFO) -def get_simple_field_path_from_v2_field_path(field_path: str) -> str: - """A helper function to extract simple . path notation from the v2 field path""" - if not field_path.startswith("[version=2.0]"): - # not a v2, we assume this is a simple path - return field_path - # this is a v2 field path - tokens = [ - t for t in field_path.split(".") if not (t.startswith("[") or t.endswith("]")) - ] - - return ".".join(tokens) - - # Inputs -> owner, ownership_type, dataset documentation_to_add = ( "Name of the user who was deleted. This description is updated via PythonSDK." diff --git a/metadata-ingestion/examples/library/dataset_add_column_tag.py b/metadata-ingestion/examples/library/dataset_add_column_tag.py index ede1809c7bad93..94204bc39b8746 100644 --- a/metadata-ingestion/examples/library/dataset_add_column_tag.py +++ b/metadata-ingestion/examples/library/dataset_add_column_tag.py @@ -15,24 +15,12 @@ GlobalTagsClass, TagAssociationClass, ) +from datahub.utilities.urns.field_paths import get_simple_field_path_from_v2_field_path log = logging.getLogger(__name__) logging.basicConfig(level=logging.INFO) -def get_simple_field_path_from_v2_field_path(field_path: str) -> str: - """A helper function to extract simple . 
path notation from the v2 field path""" - if not field_path.startswith("[version=2.0]"): - # not a v2, we assume this is a simple path - return field_path - # this is a v2 field path - tokens = [ - t for t in field_path.split(".") if not (t.startswith("[") or t.endswith("]")) - ] - - return ".".join(tokens) - - # Inputs -> the column, dataset and the tag to set column = "user_name" dataset_urn = make_dataset_urn(platform="hive", name="fct_users_created", env="PROD") diff --git a/metadata-ingestion/examples/library/dataset_add_column_term.py b/metadata-ingestion/examples/library/dataset_add_column_term.py index 115517bcfa06ea..9796fa9d5404ce 100644 --- a/metadata-ingestion/examples/library/dataset_add_column_term.py +++ b/metadata-ingestion/examples/library/dataset_add_column_term.py @@ -15,24 +15,12 @@ GlossaryTermAssociationClass, GlossaryTermsClass, ) +from datahub.utilities.urns.field_paths import get_simple_field_path_from_v2_field_path log = logging.getLogger(__name__) logging.basicConfig(level=logging.INFO) -def get_simple_field_path_from_v2_field_path(field_path: str) -> str: - """A helper function to extract simple . path notation from the v2 field path""" - if not field_path.startswith("[version=2.0]"): - # not a v2, we assume this is a simple path - return field_path - # this is a v2 field path - tokens = [ - t for t in field_path.split(".") if not (t.startswith("[") or t.endswith("]")) - ] - - return ".".join(tokens) - - # Inputs -> the column, dataset and the term to set column = "address.zipcode" dataset_urn = make_dataset_urn(platform="hive", name="realestate_db.sales", env="PROD") diff --git a/metadata-ingestion/examples/library/upsert_group.py b/metadata-ingestion/examples/library/upsert_group.py index 86a03b72c1289a..84844e142f46c0 100644 --- a/metadata-ingestion/examples/library/upsert_group.py +++ b/metadata-ingestion/examples/library/upsert_group.py @@ -5,7 +5,7 @@ CorpGroupGenerationConfig, ) from datahub.ingestion.graph.client import DataHubGraph, DataHubGraphConfig -from datahub.utilities.urns.corpuser_urn import CorpuserUrn +from datahub.metadata.urns import CorpUserUrn log = logging.getLogger(__name__) logging.basicConfig(level=logging.INFO) @@ -13,10 +13,10 @@ group_email = "foogroup@acryl.io" group = CorpGroup( id=group_email, - owners=[str(CorpuserUrn.create_from_id("datahub"))], + owners=[str(CorpUserUrn("datahub"))], members=[ - str(CorpuserUrn.create_from_id("bar@acryl.io")), - str(CorpuserUrn.create_from_id("joe@acryl.io")), + str(CorpUserUrn("bar@acryl.io")), + str(CorpUserUrn("joe@acryl.io")), ], display_name="Foo Group", email=group_email, diff --git a/metadata-ingestion/scripts/avro_codegen.py b/metadata-ingestion/scripts/avro_codegen.py index de8836559217b3..c6f6bac128b790 100644 --- a/metadata-ingestion/scripts/avro_codegen.py +++ b/metadata-ingestion/scripts/avro_codegen.py @@ -1,6 +1,8 @@ import collections +import copy import json import re +import textwrap from pathlib import Path from typing import Dict, Iterable, List, Optional, Tuple, Union @@ -115,11 +117,20 @@ def patch_schema(schema: dict, urn_arrays: Dict[str, List[Tuple[str, str]]]) -> # Patch normal urn types. field: avro.schema.Field for field in nested.fields: - java_class: Optional[str] = field.props.get("java", {}).get("class") + field_props: dict = field.props # type: ignore + java_props: dict = field_props.get("java", {}) + java_class: Optional[str] = java_props.get("class") if java_class and java_class.startswith( "com.linkedin.pegasus2avro.common.urn." 
): - field.set_prop("Urn", java_class.split(".")[-1]) + type = java_class.split(".")[-1] + entity_types = field_props.get("Relationship", {}).get( + "entityTypes", [] + ) + + field.set_prop("Urn", type) + if entity_types: + field.set_prop("entityTypes", entity_types) # Patch array urn types. if nested.name in urn_arrays: @@ -130,7 +141,7 @@ def patch_schema(schema: dict, urn_arrays: Dict[str, List[Tuple[str, str]]]) -> field.set_prop("Urn", type) field.set_prop("urn_is_array", True) - return patched.to_json() + return patched.to_json() # type: ignore def merge_schemas(schemas_obj: List[dict]) -> str: @@ -141,6 +152,7 @@ def merge_schemas(schemas_obj: List[dict]) -> str: class NamesWithDups(avro.schema.Names): def add_name(self, name_attr, space_attr, new_schema): to_add = avro.schema.Name(name_attr, space_attr, self.default_namespace) + assert to_add.fullname self.names[to_add.fullname] = new_schema return to_add @@ -228,7 +240,6 @@ def make_load_schema_methods(schemas: Iterable[str]) -> str: def save_raw_schemas(schema_save_dir: Path, schemas: Dict[str, dict]) -> None: # Save raw avsc files. - schema_save_dir.mkdir() for name, schema in schemas.items(): (schema_save_dir / f"{name}.avsc").write_text(json.dumps(schema, indent=2)) @@ -333,6 +344,342 @@ class AspectBag(TypedDict, total=False): schema_class_file.write_text("\n".join(schema_classes_lines)) +def write_urn_classes(key_aspects: List[dict], urn_dir: Path) -> None: + urn_dir.mkdir() + + (urn_dir / "__init__.py").write_text("\n# This file is intentionally left empty.") + + code = """ +# This file contains classes corresponding to entity URNs. + +from typing import ClassVar, List, Optional, Type, TYPE_CHECKING + +import functools +from deprecated.sphinx import deprecated as _sphinx_deprecated + +from datahub.utilities.urn_encoder import UrnEncoder +from datahub.utilities.urns._urn_base import _SpecificUrn, Urn +from datahub.utilities.urns.error import InvalidUrnError + +deprecated = functools.partial(_sphinx_deprecated, version="0.12.0.2") +""" + + for aspect in key_aspects: + entity_type = aspect["Aspect"]["keyForEntity"] + if aspect["Aspect"]["entityCategory"] == "internal": + continue + + code += generate_urn_class(entity_type, aspect) + + (urn_dir / "urn_defs.py").write_text(code) + + +def capitalize_entity_name(entity_name: str) -> str: + # Examples: + # corpuser -> CorpUser + # corpGroup -> CorpGroup + # mlModelDeployment -> MlModelDeployment + + if entity_name == "corpuser": + return "CorpUser" + + return f"{entity_name[0].upper()}{entity_name[1:]}" + + +def python_type(avro_type: str) -> str: + if avro_type == "string": + return "str" + elif ( + isinstance(avro_type, dict) + and avro_type.get("type") == "enum" + and avro_type.get("name") == "FabricType" + ): + # TODO: make this stricter using an enum + return "str" + raise ValueError(f"unknown type {avro_type}") + + +def field_type(field: dict) -> str: + return python_type(field["type"]) + + +def field_name(field: dict) -> str: + manual_mapping = { + "origin": "env", + "platformName": "platform_name", + } + + name: str = field["name"] + if name in manual_mapping: + return manual_mapping[name] + + # If the name is mixed case, convert to snake case. + if name.lower() != name: + # Inject an underscore before each capital letter, and then convert to lowercase. + return re.sub(r"(? 
"{class_name}": + return cls(id) +""" +_extra_urn_methods: Dict[str, List[str]] = { + "corpGroup": [_create_from_id.format(class_name="CorpGroupUrn")], + "corpuser": [_create_from_id.format(class_name="CorpUserUrn")], + "dataFlow": [ + """ +@classmethod +def create_from_ids( + cls, + orchestrator: str, + flow_id: str, + env: str, + platform_instance: Optional[str] = None, +) -> "DataFlowUrn": + return cls( + orchestrator=orchestrator, + flow_id=f"{platform_instance}.{flow_id}" if platform_instance else flow_id, + cluster=env, + ) + +@deprecated(reason="Use .orchestrator instead") +def get_orchestrator_name(self) -> str: + return self.orchestrator + +@deprecated(reason="Use .flow_id instead") +def get_flow_id(self) -> str: + return self.flow_id + +@deprecated(reason="Use .cluster instead") +def get_env(self) -> str: + return self.cluster +""", + ], + "dataJob": [ + """ +@classmethod +def create_from_ids(cls, data_flow_urn: str, job_id: str) -> "DataJobUrn": + return cls(data_flow_urn, job_id) + +def get_data_flow_urn(self) -> "DataFlowUrn": + return DataFlowUrn.from_string(self.flow) + +@deprecated(reason="Use .job_id instead") +def get_job_id(self) -> str: + return self.job_id +""" + ], + "dataPlatform": [_create_from_id.format(class_name="DataPlatformUrn")], + "dataProcessInstance": [ + _create_from_id.format(class_name="DataProcessInstanceUrn"), + """ +@deprecated(reason="Use .id instead") +def get_dataprocessinstance_id(self) -> str: + return self.id +""", + ], + "dataset": [ + """ +@classmethod +def create_from_ids( + cls, + platform_id: str, + table_name: str, + env: str, + platform_instance: Optional[str] = None, +) -> "DatasetUrn": + return DatasetUrn( + platform=platform_id, + name=f"{platform_instance}.{table_name}" if platform_instance else table_name, + env=env, + ) + +from datahub.utilities.urns.field_paths import get_simple_field_path_from_v2_field_path as _get_simple_field_path_from_v2_field_path + +get_simple_field_path_from_v2_field_path = staticmethod(deprecated(reason='Use the function from the field_paths module instead')(_get_simple_field_path_from_v2_field_path)) + +def get_data_platform_urn(self) -> "DataPlatformUrn": + return DataPlatformUrn.from_string(self.platform) + +@deprecated(reason="Use .name instead") +def get_dataset_name(self) -> str: + return self.name + +@deprecated(reason="Use .env instead") +def get_env(self) -> str: + return self.env +""" + ], + "domain": [_create_from_id.format(class_name="DomainUrn")], + "notebook": [ + """ +@deprecated(reason="Use .notebook_tool instead") +def get_platform_id(self) -> str: + return self.notebook_tool + +@deprecated(reason="Use .notebook_id instead") +def get_notebook_id(self) -> str: + return self.notebook_id +""" + ], + "tag": [_create_from_id.format(class_name="TagUrn")], +} + + +def generate_urn_class(entity_type: str, key_aspect: dict) -> str: + """Generate a class definition for this entity. + + The class definition has the following structure: + - A class attribute ENTITY_TYPE, which is the entity type string. + - A class attribute URN_PARTS, which is the number of parts in the URN. + - A constructor that takes the URN parts as arguments. The field names + will match the key aspect's field names. It will also have a _allow_coercion + flag, which will allow for some normalization (e.g. upper case env). + Then, each part will be validated (including nested calls for urn subparts). + - Utilities for converting to/from the key aspect. 
+ - Any additional methods that are required for this entity type, defined above. + These are primarily for backwards compatibility. + - Getter methods for each field. + """ + + class_name = f"{capitalize_entity_name(entity_type)}Urn" + + fields = copy.deepcopy(key_aspect["fields"]) + if entity_type == "container": + # The annotations say guid is optional, but it is required. + # This is a quick fix of the annotations. + assert field_name(fields[0]) == "guid" + assert fields[0]["type"] == ["null", "string"] + fields[0]["type"] = "string" + + _init_arg_parts: List[str] = [] + for field in fields: + default = '"PROD"' if field_name(field) == "env" else None + _arg_part = f"{field_name(field)}: {field_type(field)}" + if default: + _arg_part += f" = {default}" + _init_arg_parts.append(_arg_part) + init_args = ", ".join(_init_arg_parts) + + super_init_args = ", ".join(field_name(field) for field in fields) + + arg_count = len(fields) + parse_ids_mapping = ", ".join( + f"{field_name(field)}=entity_ids[{i}]" for i, field in enumerate(fields) + ) + + key_aspect_class = f"{key_aspect['name']}Class" + to_key_aspect_args = ", ".join( + # The LHS bypasses any field name aliases. + f"{field['name']}=self.{field_name(field)}" + for field in fields + ) + from_key_aspect_args = ", ".join( + f"{field_name(field)}=key_aspect.{field['name']}" for field in fields + ) + + init_coercion = "" + init_validation = "" + for field in fields: + init_validation += f'if not {field_name(field)}:\n raise InvalidUrnError("{field_name(field)} cannot be empty")\n' + + # Generalized mechanism for validating embedded urns. + field_urn_type_class = None + if field_name(field) == "platform": + field_urn_type_class = "DataPlatformUrn" + elif field.get("Urn"): + if len(field.get("entityTypes", [])) == 1: + field_entity_type = field["entityTypes"][0] + field_urn_type_class = f"{capitalize_entity_name(field_entity_type)}Urn" + else: + field_urn_type_class = "Urn" + + if field_urn_type_class: + init_validation += f"{field_name(field)} = str({field_name(field)})\n" + init_validation += ( + f"assert {field_urn_type_class}.from_string({field_name(field)})\n" + ) + else: + init_validation += ( + f"assert not UrnEncoder.contains_reserved_char({field_name(field)})\n" + ) + + if field_name(field) == "env": + init_coercion += "env = env.upper()\n" + # TODO add ALL_ENV_TYPES validation + elif entity_type == "dataPlatform" and field_name(field) == "platform_name": + init_coercion += 'if platform_name.startswith("urn:li:dataPlatform:"):\n' + init_coercion += " platform_name = DataPlatformUrn.from_string(platform_name).platform_name\n" + + if field_name(field) == "platform": + init_coercion += "platform = DataPlatformUrn(platform).urn()\n" + elif field_urn_type_class is None: + # For all non-urns, run the value through the UrnEncoder. + init_coercion += ( + f"{field_name(field)} = UrnEncoder.encode_string({field_name(field)})\n" + ) + if not init_coercion: + init_coercion = "pass" + + # TODO include the docs for each field + + code = f""" +if TYPE_CHECKING: + from datahub.metadata.schema_classes import {key_aspect_class} + +class {class_name}(_SpecificUrn): + ENTITY_TYPE: ClassVar[str] = "{entity_type}" + URN_PARTS: ClassVar[int] = {arg_count} + + def __init__(self, {init_args}, *, _allow_coercion: bool = True) -> None: + if _allow_coercion: + # Field coercion logic (if any is required). +{textwrap.indent(init_coercion.strip(), prefix=" "*4*3)} + + # Validation logic. 
+{textwrap.indent(init_validation.strip(), prefix=" "*4*2)} + + super().__init__(self.ENTITY_TYPE, [{super_init_args}]) + + @classmethod + def _parse_ids(cls, entity_ids: List[str]) -> "{class_name}": + if len(entity_ids) != cls.URN_PARTS: + raise InvalidUrnError(f"{class_name} should have {{cls.URN_PARTS}} parts, got {{len(entity_ids)}}: {{entity_ids}}") + return cls({parse_ids_mapping}, _allow_coercion=False) + + @classmethod + def underlying_key_aspect_type(cls) -> Type["{key_aspect_class}"]: + from datahub.metadata.schema_classes import {key_aspect_class} + + return {key_aspect_class} + + def to_key_aspect(self) -> "{key_aspect_class}": + from datahub.metadata.schema_classes import {key_aspect_class} + + return {key_aspect_class}({to_key_aspect_args}) + + @classmethod + def from_key_aspect(cls, key_aspect: "{key_aspect_class}") -> "{class_name}": + return cls({from_key_aspect_args}) +""" + + for extra_method in _extra_urn_methods.get(entity_type, []): + code += textwrap.indent(extra_method, prefix=" " * 4) + + for i, field in enumerate(fields): + code += f""" + @property + def {field_name(field)}(self) -> {field_type(field)}: + return self.entity_ids[{i}] +""" + + return code + + @click.command() @click.argument( "entity_registry", type=click.Path(exists=True, dir_okay=False), required=True @@ -367,6 +714,7 @@ def generate( if schema.get("Aspect") } + # Copy entity registry info into the corresponding key aspect. for entity in entities: # This implicitly requires that all keyAspects are resolvable. aspect = aspects[entity.keyAspect] @@ -428,6 +776,7 @@ def generate( import importlib from typing import TYPE_CHECKING +from datahub.utilities.docs_build import IS_SPHINX_BUILD from datahub.utilities._custom_package_loader import get_custom_models_package _custom_package_path = get_custom_models_package() @@ -437,16 +786,64 @@ def generate( # Required explicitly because __all__ doesn't include _ prefixed names. from ._schema_classes import _Aspect, __SCHEMA_TYPES + + if IS_SPHINX_BUILD: + # Set __module__ to the current module so that Sphinx will document the + # classes as belonging to this module instead of the custom package. + for _cls in list(globals().values()): + if hasattr(_cls, "__module__") and "datahub.metadata._schema_classes" in _cls.__module__: + _cls.__module__ = __name__ else: _custom_package = importlib.import_module(_custom_package_path) globals().update(_custom_package.__dict__) +""" + ) + + (Path(outdir) / "urns.py").write_text( + """ +# This is a specialized shim layer that allows us to dynamically load custom URN types from elsewhere. + +import importlib +from typing import TYPE_CHECKING + +from datahub.utilities.docs_build import IS_SPHINX_BUILD +from datahub.utilities._custom_package_loader import get_custom_urns_package +from datahub.utilities.urns._urn_base import Urn # noqa: F401 +_custom_package_path = get_custom_urns_package() + +if TYPE_CHECKING or not _custom_package_path: + from ._urns.urn_defs import * # noqa: F401 + + if IS_SPHINX_BUILD: + # Set __module__ to the current module so that Sphinx will document the + # classes as belonging to this module instead of the custom package. + for _cls in list(globals().values()): + if hasattr(_cls, "__module__") and ("datahub.metadata._urns.urn_defs" in _cls.__module__ or _cls is Urn): + _cls.__module__ = __name__ +else: + _custom_package = importlib.import_module(_custom_package_path) + globals().update(_custom_package.__dict__) """ ) + # Generate URN classes. 
+ urn_dir = Path(outdir) / "_urns" + write_urn_classes( + [aspect for aspect in aspects.values() if aspect["Aspect"].get("keyForEntity")], + urn_dir, + ) + + # Save raw schema files in codegen as well. + schema_save_dir = Path(outdir) / "schemas" + schema_save_dir.mkdir() + for schema_out_file, schema in schemas.items(): + (schema_save_dir / f"{schema_out_file}.avsc").write_text( + json.dumps(schema, indent=2) + ) + # Keep a copy of a few raw avsc files. required_avsc_schemas = {"MetadataChangeEvent", "MetadataChangeProposal"} - schema_save_dir = Path(outdir) / "schemas" save_raw_schemas( schema_save_dir, { diff --git a/metadata-ingestion/scripts/custom_package_codegen.py b/metadata-ingestion/scripts/custom_package_codegen.py index 4a674550d49df0..a5883c9ae90200 100644 --- a/metadata-ingestion/scripts/custom_package_codegen.py +++ b/metadata-ingestion/scripts/custom_package_codegen.py @@ -90,6 +90,7 @@ def generate( entry_points={{ "datahub.custom_packages": [ "models={python_package_name}.models.schema_classes", + "urns={python_package_name}.models._urns.urn_defs", ], }}, ) diff --git a/metadata-ingestion/src/datahub/api/entities/dataprocess/dataprocess_instance.py b/metadata-ingestion/src/datahub/api/entities/dataprocess/dataprocess_instance.py index 2f07e4a112f934..6a2f733dcf8f7d 100644 --- a/metadata-ingestion/src/datahub/api/entities/dataprocess/dataprocess_instance.py +++ b/metadata-ingestion/src/datahub/api/entities/dataprocess/dataprocess_instance.py @@ -75,7 +75,7 @@ class DataProcessInstance: def __post_init__(self): self.urn = DataProcessInstanceUrn.create_from_id( - dataprocessinstance_id=DataProcessInstanceKey( + id=DataProcessInstanceKey( cluster=self.cluster, orchestrator=self.orchestrator, id=self.id, diff --git a/metadata-ingestion/src/datahub/ingestion/source/csv_enricher.py b/metadata-ingestion/src/datahub/ingestion/source/csv_enricher.py index 611f0c5c52cc65..a2db8ceb4a89a0 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/csv_enricher.py +++ b/metadata-ingestion/src/datahub/ingestion/source/csv_enricher.py @@ -45,6 +45,7 @@ TagAssociationClass, ) from datahub.utilities.urns.dataset_urn import DatasetUrn +from datahub.utilities.urns.field_paths import get_simple_field_path_from_v2_field_path from datahub.utilities.urns.urn import Urn, guess_entity_type DATASET_ENTITY_TYPE = DatasetUrn.ENTITY_TYPE @@ -436,9 +437,7 @@ def process_sub_resource_row( field_match = False for field_info in current_editable_schema_metadata.editableSchemaFieldInfo: if ( - DatasetUrn.get_simple_field_path_from_v2_field_path( - field_info.fieldPath - ) + get_simple_field_path_from_v2_field_path(field_info.fieldPath) == field_path ): # we have some editable schema metadata for this field diff --git a/metadata-ingestion/src/datahub/ingestion/source/metadata/business_glossary.py b/metadata-ingestion/src/datahub/ingestion/source/metadata/business_glossary.py index b5d9d96354fc5d..97877df63707f5 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/metadata/business_glossary.py +++ b/metadata-ingestion/src/datahub/ingestion/source/metadata/business_glossary.py @@ -113,7 +113,7 @@ def create_id(path: List[str], default_id: Optional[str], enable_auto_id: bool) id_: str = ".".join(path) - if UrnEncoder.contains_reserved_char(id_): + if UrnEncoder.contains_extended_reserved_char(id_): enable_auto_id = True if enable_auto_id: diff --git a/metadata-ingestion/src/datahub/ingestion/source/tableau.py b/metadata-ingestion/src/datahub/ingestion/source/tableau.py index 
08df7599510f47..da44d09121c6c1 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/tableau.py +++ b/metadata-ingestion/src/datahub/ingestion/source/tableau.py @@ -1086,9 +1086,7 @@ def get_upstream_columns_of_fields_in_datasource( def is_snowflake_urn(self, urn: str) -> bool: return ( - DatasetUrn.create_from_string(urn) - .get_data_platform_urn() - .get_platform_name() + DatasetUrn.create_from_string(urn).get_data_platform_urn().platform_name == "snowflake" ) diff --git a/metadata-ingestion/src/datahub/utilities/_custom_package_loader.py b/metadata-ingestion/src/datahub/utilities/_custom_package_loader.py index 1b66258557406d..bb029db3b65b7e 100644 --- a/metadata-ingestion/src/datahub/utilities/_custom_package_loader.py +++ b/metadata-ingestion/src/datahub/utilities/_custom_package_loader.py @@ -10,6 +10,7 @@ _CUSTOM_PACKAGE_GROUP_KEY = "datahub.custom_packages" _MODELS_KEY = "models" +_URNS_KEY = "urns" class CustomPackageException(Exception): @@ -41,3 +42,7 @@ def _get_custom_package_for_name(name: str) -> Optional[str]: def get_custom_models_package() -> Optional[str]: return _get_custom_package_for_name(_MODELS_KEY) + + +def get_custom_urns_package() -> Optional[str]: + return _get_custom_package_for_name(_URNS_KEY) diff --git a/metadata-ingestion/src/datahub/utilities/docs_build.py b/metadata-ingestion/src/datahub/utilities/docs_build.py new file mode 100644 index 00000000000000..18cb3629516ba7 --- /dev/null +++ b/metadata-ingestion/src/datahub/utilities/docs_build.py @@ -0,0 +1,9 @@ +from typing import TYPE_CHECKING + +try: + # Via https://stackoverflow.com/a/65147676 + if not TYPE_CHECKING and __sphinx_build__: + IS_SPHINX_BUILD = True + +except NameError: + IS_SPHINX_BUILD = False diff --git a/metadata-ingestion/src/datahub/utilities/sqlglot_lineage.py b/metadata-ingestion/src/datahub/utilities/sqlglot_lineage.py index efe2d26aae3d98..c2cccf9f1e3891 100644 --- a/metadata-ingestion/src/datahub/utilities/sqlglot_lineage.py +++ b/metadata-ingestion/src/datahub/utilities/sqlglot_lineage.py @@ -37,7 +37,7 @@ TimeTypeClass, ) from datahub.utilities.file_backed_collections import ConnectionWrapper, FileBackedDict -from datahub.utilities.urns.dataset_urn import DatasetUrn +from datahub.utilities.urns.field_paths import get_simple_field_path_from_v2_field_path logger = logging.getLogger(__name__) @@ -443,15 +443,14 @@ def _convert_schema_aspect_to_info( cls, schema_metadata: SchemaMetadataClass ) -> SchemaInfo: return { - DatasetUrn.get_simple_field_path_from_v2_field_path(col.fieldPath): ( + get_simple_field_path_from_v2_field_path(col.fieldPath): ( # The actual types are more of a "nice to have". col.nativeDataType or "str" ) for col in schema_metadata.fields # TODO: We can't generate lineage to columns nested within structs yet. - if "." - not in DatasetUrn.get_simple_field_path_from_v2_field_path(col.fieldPath) + if "." not in get_simple_field_path_from_v2_field_path(col.fieldPath) } @classmethod @@ -459,17 +458,14 @@ def convert_graphql_schema_metadata_to_info( cls, schema: GraphQLSchemaMetadata ) -> SchemaInfo: return { - DatasetUrn.get_simple_field_path_from_v2_field_path(field["fieldPath"]): ( + get_simple_field_path_from_v2_field_path(field["fieldPath"]): ( # The actual types are more of a "nice to have". field["nativeDataType"] or "str" ) for field in schema["fields"] # TODO: We can't generate lineage to columns nested within structs yet. - if "." - not in DatasetUrn.get_simple_field_path_from_v2_field_path( - field["fieldPath"] - ) + if "." 
not in get_simple_field_path_from_v2_field_path(field["fieldPath"]) } def close(self) -> None: diff --git a/metadata-ingestion/src/datahub/utilities/urn_encoder.py b/metadata-ingestion/src/datahub/utilities/urn_encoder.py index 706d50d9420556..093c9ade8c1528 100644 --- a/metadata-ingestion/src/datahub/utilities/urn_encoder.py +++ b/metadata-ingestion/src/datahub/utilities/urn_encoder.py @@ -23,4 +23,8 @@ def encode_char(c: str) -> str: @staticmethod def contains_reserved_char(value: str) -> bool: + return bool(set(value).intersection(RESERVED_CHARS)) + + @staticmethod + def contains_extended_reserved_char(value: str) -> bool: return bool(set(value).intersection(RESERVED_CHARS_EXTENDED)) diff --git a/metadata-ingestion/src/datahub/utilities/urns/_urn_base.py b/metadata-ingestion/src/datahub/utilities/urns/_urn_base.py new file mode 100644 index 00000000000000..fbde0d6e6d69a5 --- /dev/null +++ b/metadata-ingestion/src/datahub/utilities/urns/_urn_base.py @@ -0,0 +1,234 @@ +import functools +import urllib.parse +from abc import abstractmethod +from typing import ClassVar, Dict, List, Optional, Type, TypeVar + +from deprecated import deprecated + +from datahub.utilities.urns.error import InvalidUrnError + +URN_TYPES: Dict[str, Type["_SpecificUrn"]] = {} + + +def _split_entity_id(entity_id: str) -> List[str]: + if not (entity_id.startswith("(") and entity_id.endswith(")")): + return [entity_id] + + parts = [] + start_paren_count = 1 + part_start = 1 + for i in range(1, len(entity_id)): + c = entity_id[i] + if c == "(": + start_paren_count += 1 + elif c == ")": + start_paren_count -= 1 + if start_paren_count < 0: + raise InvalidUrnError(f"{entity_id}, mismatched paren nesting") + elif c == ",": + if start_paren_count != 1: + continue + + if i - part_start <= 0: + raise InvalidUrnError(f"{entity_id}, empty part disallowed") + parts.append(entity_id[part_start:i]) + part_start = i + 1 + + if start_paren_count != 0: + raise InvalidUrnError(f"{entity_id}, mismatched paren nesting") + + parts.append(entity_id[part_start:-1]) + + return parts + + +_UrnSelf = TypeVar("_UrnSelf", bound="Urn") + + +@functools.total_ordering +class Urn: + """ + URNs are globally unique identifiers used to refer to entities. + + It will be in format of urn:li:: or urn:li::(,,...) + + A note on encoding: certain characters, particularly commas and parentheses, are + not allowed in string portions of the URN. However, these are allowed when the urn + has another urn embedded within it. The main URN class ignores this possibility, + and assumes that the user provides a valid URN string. However, the specific URN + classes, such as DatasetUrn, will automatically encode these characters using + url-encoding when the URN is created and _allow_coercion is enabled (the default). + However, all from_string methods will try to preserve the string as-is, and will + raise an error if the string is invalid. 
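+
+    A minimal usage sketch (illustrative only; the exact subclass returned
+    depends on which _SpecificUrn types have been registered):
+
+        urn = Urn.from_string("urn:li:corpuser:jdoe")
+        assert urn.entity_type == "corpuser"
+        assert urn.entity_ids == ["jdoe"]
+        assert str(urn) == "urn:li:corpuser:jdoe"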
+ """ + + # retained for backwards compatibility + URN_PREFIX: ClassVar[str] = "urn" + LI_DOMAIN: ClassVar[str] = "li" + + _entity_type: str + _entity_ids: List[str] + + def __init__(self, entity_type: str, entity_id: List[str]) -> None: + self._entity_type = entity_type + self._entity_ids = entity_id + + if not self._entity_ids: + raise InvalidUrnError("Empty entity id.") + for part in self._entity_ids: + if not part: + raise InvalidUrnError("Empty entity id.") + + @property + def entity_type(self) -> str: + return self._entity_type + + @property + def entity_ids(self) -> List[str]: + return self._entity_ids + + @classmethod + def from_string(cls: Type[_UrnSelf], urn_str: str) -> "_UrnSelf": + """ + Creates an Urn from its string representation. + + Args: + urn_str: The string representation of the Urn. + + Returns: + Urn of the given string representation. + + Raises: + InvalidUrnError: If the string representation is in invalid format. + """ + + # TODO: Add handling for url encoded urns e.g. urn%3A ... + + if not urn_str.startswith("urn:li:"): + raise InvalidUrnError( + f"Invalid urn string: {urn_str}. Urns should start with 'urn:li:'" + ) + + parts: List[str] = urn_str.split(":", maxsplit=3) + if len(parts) != 4: + raise InvalidUrnError( + f"Invalid urn string: {urn_str}. Expect 4 parts from urn string but found {len(parts)}" + ) + if "" in parts: + raise InvalidUrnError( + f"Invalid urn string: {urn_str}. There should not be empty parts in urn string." + ) + + _urn, _li, entity_type, entity_ids_str = parts + entity_ids = _split_entity_id(entity_ids_str) + + UrnCls: Optional[Type["_SpecificUrn"]] = URN_TYPES.get(entity_type) + if UrnCls: + if not issubclass(UrnCls, cls): + # We want to return a specific subtype of Urn. If we're called + # with Urn.from_string(), that's fine. However, if we're called as + # DatasetUrn.from_string('urn:li:corpuser:foo'), that should throw an error. + raise InvalidUrnError( + f"Passed an urn of type {entity_type} to the from_string method of {cls.__name__}. Use Urn.from_string() or {UrnCls.__name__}.from_string() instead." + ) + return UrnCls._parse_ids(entity_ids) # type: ignore + + # Fallback for unknown types. 
+ if cls != Urn: + raise InvalidUrnError( + f"Unknown urn type {entity_type} for urn {urn_str} of type {cls}" + ) + return cls(entity_type, entity_ids) + + def urn(self) -> str: + """Get the string representation of the urn.""" + + if len(self._entity_ids) == 1: + return f"urn:li:{self._entity_type}:{self._entity_ids[0]}" + + return f"urn:li:{self._entity_type}:({','.join(self._entity_ids)})" + + def __str__(self) -> str: + return self.urn() + + def __repr__(self) -> str: + return f"{self.__class__.__name__}({self.urn()})" + + def urn_url_encoded(self) -> str: + return Urn.url_encode(self.urn()) + + def __eq__(self, other: object) -> bool: + if not isinstance(other, Urn): + return False + return self.urn() == other.urn() + + def __lt__(self, other: object) -> bool: + if not isinstance(other, Urn): + raise TypeError( + f"'<' not supported between instances of '{type(self)}' and '{type(other)}'" + ) + return self.urn() < other.urn() + + def __hash__(self) -> int: + return hash(self.urn()) + + @classmethod + @deprecated(reason="prefer .from_string") + def create_from_string(cls: Type[_UrnSelf], urn_str: str) -> "_UrnSelf": + return cls.from_string(urn_str) + + @deprecated(reason="prefer .entity_ids") + def get_entity_id(self) -> List[str]: + return self._entity_ids + + @deprecated(reason="prefer .entity_type") + def get_type(self) -> str: + return self._entity_type + + @deprecated(reason="no longer needed") + def get_domain(self) -> str: + return "li" + + @deprecated(reason="no longer needed") + def get_entity_id_as_string(self) -> str: + urn = self.urn() + prefix = "urn:li:" + assert urn.startswith(prefix) + id_with_type = urn[len(prefix) :] + return id_with_type.split(":", maxsplit=1)[1] + + @classmethod + @deprecated(reason="no longer needed") + def validate(cls, urn_str: str) -> None: + Urn.create_from_string(urn_str) + + @staticmethod + def url_encode(urn: str) -> str: + # safe='' encodes '/' as '%2F' + return urllib.parse.quote(urn, safe="") + + +class _SpecificUrn(Urn): + ENTITY_TYPE: str = "" + + def __init_subclass__(cls) -> None: + # Validate the subclass. + entity_type = cls.ENTITY_TYPE + if not entity_type: + raise ValueError(f'_SpecificUrn subclass {cls} must define "ENTITY_TYPE"') + + # Register the urn type. + if entity_type in URN_TYPES: + raise ValueError(f"duplicate urn type registered: {entity_type}") + URN_TYPES[entity_type] = cls + + return super().__init_subclass__() + + @classmethod + def underlying_key_aspect_type(cls) -> Type: + raise NotImplementedError() + + @classmethod + @abstractmethod + def _parse_ids(cls: Type[_UrnSelf], entity_ids: List[str]) -> _UrnSelf: + raise NotImplementedError() diff --git a/metadata-ingestion/src/datahub/utilities/urns/corp_group_urn.py b/metadata-ingestion/src/datahub/utilities/urns/corp_group_urn.py index 94fa133becf6cf..37c10769259459 100644 --- a/metadata-ingestion/src/datahub/utilities/urns/corp_group_urn.py +++ b/metadata-ingestion/src/datahub/utilities/urns/corp_group_urn.py @@ -1,40 +1 @@ -from typing import List - -from datahub.utilities.urns.error import InvalidUrnError -from datahub.utilities.urns.urn import Urn - - -class CorpGroupUrn(Urn): - """ - expected corp group urn format: urn:li:corpGroup:. 
example: "urn:li:corpGroup:data" - """ - - ENTITY_TYPE: str = "corpGroup" - - def __init__( - self, entity_type: str, entity_id: List[str], domain: str = Urn.LI_DOMAIN - ): - super().__init__(entity_type, entity_id, domain) - - @classmethod - def create_from_string(cls, urn_str: str) -> "CorpGroupUrn": - urn: Urn = super().create_from_string(urn_str) - return cls(urn.get_type(), urn.get_entity_id(), urn.get_domain()) - - @classmethod - def create_from_id(cls, group_id: str) -> "CorpGroupUrn": - return cls(CorpGroupUrn.ENTITY_TYPE, [group_id]) - - @staticmethod - def _validate_entity_type(entity_type: str) -> None: - if entity_type != CorpGroupUrn.ENTITY_TYPE: - raise InvalidUrnError( - f"Entity type should be {CorpGroupUrn.ENTITY_TYPE} but found {entity_type}" - ) - - @staticmethod - def _validate_entity_id(entity_id: List[str]) -> None: - if len(entity_id) != 1: - raise InvalidUrnError( - f"Expect 1 part in entity id, but found{len(entity_id)}" - ) +from datahub.metadata.urns import CorpGroupUrn # noqa: F401 diff --git a/metadata-ingestion/src/datahub/utilities/urns/corpuser_urn.py b/metadata-ingestion/src/datahub/utilities/urns/corpuser_urn.py index 653b99f4af9bf3..5f9ecf65951b95 100644 --- a/metadata-ingestion/src/datahub/utilities/urns/corpuser_urn.py +++ b/metadata-ingestion/src/datahub/utilities/urns/corpuser_urn.py @@ -1,40 +1 @@ -from typing import List - -from datahub.utilities.urns.error import InvalidUrnError -from datahub.utilities.urns.urn import Urn - - -class CorpuserUrn(Urn): - """ - expected corp user urn format: urn:li:corpuser:. example: "urn:li:corpuser:tom" - """ - - ENTITY_TYPE: str = "corpuser" - - def __init__( - self, entity_type: str, entity_id: List[str], domain: str = Urn.LI_DOMAIN - ): - super().__init__(entity_type, entity_id, domain) - - @classmethod - def create_from_string(cls, urn_str: str) -> "CorpuserUrn": - urn: Urn = super().create_from_string(urn_str) - return cls(urn.get_type(), urn.get_entity_id(), urn.get_domain()) - - @classmethod - def create_from_id(cls, user_id: str) -> "CorpuserUrn": - return cls(CorpuserUrn.ENTITY_TYPE, [user_id]) - - @staticmethod - def _validate_entity_type(entity_type: str) -> None: - if entity_type != CorpuserUrn.ENTITY_TYPE: - raise InvalidUrnError( - f"Entity type should be {CorpuserUrn.ENTITY_TYPE} but found {entity_type}" - ) - - @staticmethod - def _validate_entity_id(entity_id: List[str]) -> None: - if len(entity_id) != 1: - raise InvalidUrnError( - f"Expect 1 part in entity id, but found{len(entity_id)}" - ) +from datahub.metadata.urns import CorpUserUrn as CorpuserUrn # noqa: F401 diff --git a/metadata-ingestion/src/datahub/utilities/urns/data_flow_urn.py b/metadata-ingestion/src/datahub/utilities/urns/data_flow_urn.py index f0dda5d8db4932..5b2b45927c339e 100644 --- a/metadata-ingestion/src/datahub/utilities/urns/data_flow_urn.py +++ b/metadata-ingestion/src/datahub/utilities/urns/data_flow_urn.py @@ -1,88 +1 @@ -from typing import List, Optional - -from datahub.configuration.source_common import ALL_ENV_TYPES -from datahub.utilities.urns.error import InvalidUrnError -from datahub.utilities.urns.urn import Urn - - -class DataFlowUrn(Urn): - """ - expected data flow urn format: urn:li:dataFlow:(,,). 
example: - urn:li:dataFlow:(airflow,ingest_user,prod) - """ - - ENTITY_TYPE: str = "dataFlow" - - def __init__( - self, entity_type: str, entity_id: List[str], domain: str = Urn.LI_DOMAIN - ): - super().__init__(entity_type, entity_id, domain) - - @classmethod - def create_from_string(cls, urn_str: str) -> "DataFlowUrn": - """ - Create a DataFlowUrn from the its string representation - :param urn_str: the string representation of the DataFlowUrn - :return: DataFlowUrn of the given string representation - :raises InvalidUrnError is the string representation is in invalid format - """ - urn: Urn = super().create_from_string(urn_str) - return cls(urn.get_type(), urn.get_entity_id(), urn.get_domain()) - - def get_orchestrator_name(self) -> str: - """ - :return: the orchestrator name for the Dataflow - """ - return self.get_entity_id()[0] - - def get_flow_id(self) -> str: - """ - :return: the data flow id from this DataFlowUrn - """ - return self.get_entity_id()[1] - - def get_env(self) -> str: - """ - :return: the environment where the DataFlow is run - """ - return self.get_entity_id()[2] - - @classmethod - def create_from_ids( - cls, - orchestrator: str, - flow_id: str, - env: str, - platform_instance: Optional[str] = None, - ) -> "DataFlowUrn": - entity_id: List[str] - if platform_instance: - entity_id = [ - orchestrator, - f"{platform_instance}.{flow_id}", - env, - ] - else: - entity_id = [orchestrator, flow_id, env] - return cls(DataFlowUrn.ENTITY_TYPE, entity_id) - - @staticmethod - def _validate_entity_type(entity_type: str) -> None: - if entity_type != DataFlowUrn.ENTITY_TYPE: - raise InvalidUrnError( - f"Entity type should be {DataFlowUrn.ENTITY_TYPE} but found {entity_type}" - ) - - @staticmethod - def _validate_entity_id(entity_id: List[str]) -> None: - # expected entity id format (,,) - if len(entity_id) != 3: - raise InvalidUrnError( - f"Expect 3 parts in the entity id but found {entity_id}" - ) - - env = entity_id[2].upper() - if env not in ALL_ENV_TYPES: - raise InvalidUrnError( - f"Invalid env:{env}. Allowed envs are {ALL_ENV_TYPES}" - ) +from datahub.metadata.urns import DataFlowUrn # noqa: F401 diff --git a/metadata-ingestion/src/datahub/utilities/urns/data_job_urn.py b/metadata-ingestion/src/datahub/utilities/urns/data_job_urn.py index 9459646893b927..53e3419ee7ecb2 100644 --- a/metadata-ingestion/src/datahub/utilities/urns/data_job_urn.py +++ b/metadata-ingestion/src/datahub/utilities/urns/data_job_urn.py @@ -1,51 +1 @@ -from typing import List - -from datahub.utilities.urns.data_flow_urn import DataFlowUrn -from datahub.utilities.urns.error import InvalidUrnError -from datahub.utilities.urns.urn import Urn - - -class DataJobUrn(Urn): - """ - expected Data job urn format: urn:li:dataJob:(,). 
example: - "urn:li:dataJob:(urn:li:dataFlow:(airflow,sample_flow,prod),sample_job)" - """ - - ENTITY_TYPE: str = "dataJob" - - def __init__( - self, entity_type: str, entity_id: List[str], domain: str = Urn.LI_DOMAIN - ): - super().__init__(entity_type, entity_id, domain) - - def get_data_flow_urn(self) -> DataFlowUrn: - return DataFlowUrn.create_from_string(self.get_entity_id()[0]) - - def get_job_id(self) -> str: - return self.get_entity_id()[1] - - @classmethod - def create_from_string(cls, urn_str: str) -> "DataJobUrn": - urn: Urn = super().create_from_string(urn_str) - return cls(urn.get_type(), urn.get_entity_id(), urn.get_domain()) - - @classmethod - def create_from_ids(cls, data_flow_urn: str, job_id: str) -> "DataJobUrn": - return cls(DataJobUrn.ENTITY_TYPE, [data_flow_urn, job_id]) - - @staticmethod - def _validate_entity_type(entity_type: str) -> None: - if entity_type != DataJobUrn.ENTITY_TYPE: - raise InvalidUrnError( - f"Entity type should be {DataJobUrn.ENTITY_TYPE} but found {entity_type}" - ) - - @staticmethod - def _validate_entity_id(entity_id: List[str]) -> None: - if len(entity_id) != 2: - raise InvalidUrnError( - f"Expect 2 part in entity id, but found{len(entity_id)}" - ) - - data_flow_urn_str = entity_id[0] - DataFlowUrn.validate(data_flow_urn_str) +from datahub.metadata.urns import DataJobUrn # noqa: F401 diff --git a/metadata-ingestion/src/datahub/utilities/urns/data_platform_urn.py b/metadata-ingestion/src/datahub/utilities/urns/data_platform_urn.py index 79cf54dfe920a0..9d37e38f256e7f 100644 --- a/metadata-ingestion/src/datahub/utilities/urns/data_platform_urn.py +++ b/metadata-ingestion/src/datahub/utilities/urns/data_platform_urn.py @@ -1,34 +1 @@ -from typing import List - -from datahub.utilities.urns.error import InvalidUrnError -from datahub.utilities.urns.urn import Urn - - -class DataPlatformUrn(Urn): - """ - expected dataset urn format: urn:li:dataPlatform:. 
example: "urn:li:dataPlatform:hive" - """ - - ENTITY_TYPE: str = "dataPlatform" - - def __init__(self, entity_type: str, entity_id: List[str], domain: str = "li"): - super().__init__(entity_type, entity_id, domain) - - @classmethod - def create_from_string(cls, urn_str: str) -> "DataPlatformUrn": - urn: Urn = super().create_from_string(urn_str) - return cls(urn.get_type(), urn.get_entity_id(), urn.get_domain()) - - @classmethod - def create_from_id(cls, platform_id: str) -> "DataPlatformUrn": - return cls(DataPlatformUrn.ENTITY_TYPE, [platform_id]) - - @staticmethod - def _validate_entity_type(entity_type: str) -> None: - if entity_type != DataPlatformUrn.ENTITY_TYPE: - raise InvalidUrnError( - f"Entity type should be {DataPlatformUrn.ENTITY_TYPE} but found {entity_type}" - ) - - def get_platform_name(self) -> str: - return self.get_entity_id()[0] +from datahub.metadata.urns import DataPlatformUrn # noqa: F401 diff --git a/metadata-ingestion/src/datahub/utilities/urns/data_process_instance_urn.py b/metadata-ingestion/src/datahub/utilities/urns/data_process_instance_urn.py index 6367d48d6d4413..df6ba797d069c1 100644 --- a/metadata-ingestion/src/datahub/utilities/urns/data_process_instance_urn.py +++ b/metadata-ingestion/src/datahub/utilities/urns/data_process_instance_urn.py @@ -1,46 +1 @@ -from typing import List - -from datahub.utilities.urns.error import InvalidUrnError -from datahub.utilities.urns.urn import Urn - - -class DataProcessInstanceUrn(Urn): - """ - expected domain urn format: urn:li:dataProcessInstance: - """ - - ENTITY_TYPE: str = "dataProcessInstance" - - def __init__( - self, entity_type: str, entity_id: List[str], domain_id: str = Urn.LI_DOMAIN - ): - super().__init__(entity_type, entity_id, domain_id) - - @classmethod - def create_from_string(cls, urn_str: str) -> "DataProcessInstanceUrn": - urn: Urn = super().create_from_string(urn_str) - return cls(urn.get_type(), urn.get_entity_id(), urn.get_domain()) - - @classmethod - def create_from_id(cls, dataprocessinstance_id: str) -> "DataProcessInstanceUrn": - return cls(DataProcessInstanceUrn.ENTITY_TYPE, [dataprocessinstance_id]) - - def get_dataprocessinstance_id(self) -> str: - """ - :return: the dataprocess instance id from this DatasetUrn - """ - return self.get_entity_id()[0] - - @staticmethod - def _validate_entity_type(entity_type: str) -> None: - if entity_type != DataProcessInstanceUrn.ENTITY_TYPE: - raise InvalidUrnError( - f"Entity type should be {DataProcessInstanceUrn.ENTITY_TYPE} but found {entity_type}" - ) - - @staticmethod - def _validate_entity_id(entity_id: List[str]) -> None: - if len(entity_id) != 1: - raise InvalidUrnError( - f"Expect 1 part in entity id, but found{len(entity_id)}" - ) +from datahub.metadata.urns import DataProcessInstanceUrn # noqa: F401 diff --git a/metadata-ingestion/src/datahub/utilities/urns/dataset_urn.py b/metadata-ingestion/src/datahub/utilities/urns/dataset_urn.py index 3ed33c068496eb..6078ffefc03d85 100644 --- a/metadata-ingestion/src/datahub/utilities/urns/dataset_urn.py +++ b/metadata-ingestion/src/datahub/utilities/urns/dataset_urn.py @@ -1,112 +1 @@ -from typing import List, Optional - -from datahub.configuration.source_common import ALL_ENV_TYPES -from datahub.utilities.urn_encoder import UrnEncoder -from datahub.utilities.urns.data_platform_urn import DataPlatformUrn -from datahub.utilities.urns.error import InvalidUrnError -from datahub.utilities.urns.urn import Urn - - -class DatasetUrn(Urn): - """ - expected dataset urn format: urn:li:dataset:(,,env). 
example: - urn:li:dataset:(urn:li:dataPlatform:hive,member,prod) - """ - - ENTITY_TYPE: str = "dataset" - - def __init__(self, entity_type: str, entity_id: List[str], domain: str = "li"): - super().__init__(entity_type, UrnEncoder.encode_string_array(entity_id), domain) - - @classmethod - def create_from_string(cls, urn_str: str) -> "DatasetUrn": - """ - Create a DatasetUrn from the its string representation - :param urn_str: the string representation of the DatasetUrn - :return: DatasetUrn of the given string representation - :raises InvalidUrnError is the string representation is in invalid format - """ - urn: Urn = super().create_from_string(urn_str) - return cls(urn.get_type(), urn.get_entity_id(), urn.get_domain()) - - def get_data_platform_urn(self) -> DataPlatformUrn: - """ - :return: the DataPlatformUrn of where the Dataset is created - """ - return DataPlatformUrn.create_from_string(self.get_entity_id()[0]) - - def get_dataset_name(self) -> str: - """ - :return: the dataset name from this DatasetUrn - """ - return self.get_entity_id()[1] - - def get_env(self) -> str: - """ - :return: the environment where the Dataset is created - """ - return self.get_entity_id()[2] - - @classmethod - def create_from_ids( - cls, - platform_id: str, - table_name: str, - env: str, - platform_instance: Optional[str] = None, - ) -> "DatasetUrn": - entity_id: List[str] - if platform_instance: - entity_id = [ - str(DataPlatformUrn.create_from_id(platform_id)), - f"{platform_instance}.{table_name}", - env, - ] - else: - entity_id = [ - str(DataPlatformUrn.create_from_id(platform_id)), - table_name, - env, - ] - return cls(DatasetUrn.ENTITY_TYPE, entity_id) - - @staticmethod - def _validate_entity_type(entity_type: str) -> None: - if entity_type != DatasetUrn.ENTITY_TYPE: - raise InvalidUrnError( - f"Entity type should be {DatasetUrn.ENTITY_TYPE} but found {entity_type}" - ) - - @staticmethod - def _validate_entity_id(entity_id: List[str]) -> None: - # expected entity id format (,,) - if len(entity_id) != 3: - raise InvalidUrnError( - f"Expect 3 parts in the entity id but found {entity_id}" - ) - - platform_urn_str = entity_id[0] - - DataPlatformUrn.validate(platform_urn_str) - env = entity_id[2].upper() - if env not in ALL_ENV_TYPES: - raise InvalidUrnError( - f"Invalid env:{env}. Allowed envs are {ALL_ENV_TYPES}" - ) - - """A helper function to extract simple . path notation from the v2 field path""" - - @staticmethod - def get_simple_field_path_from_v2_field_path(field_path: str) -> str: - if field_path.startswith("[version=2.0]"): - # this is a v2 field path - tokens = [ - t - for t in field_path.split(".") - if not (t.startswith("[") or t.endswith("]")) - ] - path = ".".join(tokens) - return path - else: - # not a v2, we assume this is a simple path - return field_path +from datahub.metadata.urns import DatasetUrn # noqa: F401 diff --git a/metadata-ingestion/src/datahub/utilities/urns/domain_urn.py b/metadata-ingestion/src/datahub/utilities/urns/domain_urn.py index dc875ce84f9737..442a6b27729bba 100644 --- a/metadata-ingestion/src/datahub/utilities/urns/domain_urn.py +++ b/metadata-ingestion/src/datahub/utilities/urns/domain_urn.py @@ -1,40 +1 @@ -from typing import List - -from datahub.utilities.urns.error import InvalidUrnError -from datahub.utilities.urns.urn import Urn - - -class DomainUrn(Urn): - """ - expected domain urn format: urn:li:domain:. 
example: "urn:li:domain:product" - """ - - ENTITY_TYPE: str = "domain" - - def __init__( - self, entity_type: str, entity_id: List[str], domain: str = Urn.LI_DOMAIN - ): - super().__init__(entity_type, entity_id, domain) - - @classmethod - def create_from_string(cls, urn_str: str) -> "DomainUrn": - urn: Urn = super().create_from_string(urn_str) - return cls(urn.get_type(), urn.get_entity_id(), urn.get_domain()) - - @classmethod - def create_from_id(cls, domain_id: str) -> "DomainUrn": - return cls(DomainUrn.ENTITY_TYPE, [domain_id]) - - @staticmethod - def _validate_entity_type(entity_type: str) -> None: - if entity_type != DomainUrn.ENTITY_TYPE: - raise InvalidUrnError( - f"Entity type should be {DomainUrn.ENTITY_TYPE} but found {entity_type}" - ) - - @staticmethod - def _validate_entity_id(entity_id: List[str]) -> None: - if len(entity_id) != 1: - raise InvalidUrnError( - f"Expect 1 part in entity id, but found{len(entity_id)}" - ) +from datahub.metadata.urns import DomainUrn # noqa: F401 diff --git a/metadata-ingestion/src/datahub/utilities/urns/error.py b/metadata-ingestion/src/datahub/utilities/urns/error.py index 12b7c02ab2d9a2..a5c17c40787cad 100644 --- a/metadata-ingestion/src/datahub/utilities/urns/error.py +++ b/metadata-ingestion/src/datahub/utilities/urns/error.py @@ -1,3 +1,2 @@ class InvalidUrnError(Exception): - def __init__(self, msg: str): - super().__init__(msg) + pass diff --git a/metadata-ingestion/src/datahub/utilities/urns/field_paths.py b/metadata-ingestion/src/datahub/utilities/urns/field_paths.py new file mode 100644 index 00000000000000..c2ecfa30311409 --- /dev/null +++ b/metadata-ingestion/src/datahub/utilities/urns/field_paths.py @@ -0,0 +1,15 @@ +def get_simple_field_path_from_v2_field_path(field_path: str) -> str: + """A helper function to extract simple . path notation from the v2 field path""" + + if field_path.startswith("[version=2.0]"): + # this is a v2 field path + tokens = [ + t + for t in field_path.split(".") + if not (t.startswith("[") or t.endswith("]")) + ] + path = ".".join(tokens) + return path + else: + # not a v2, we assume this is a simple path + return field_path diff --git a/metadata-ingestion/src/datahub/utilities/urns/notebook_urn.py b/metadata-ingestion/src/datahub/utilities/urns/notebook_urn.py index fcf2c924503091..60a4f5396aa468 100644 --- a/metadata-ingestion/src/datahub/utilities/urns/notebook_urn.py +++ b/metadata-ingestion/src/datahub/utilities/urns/notebook_urn.py @@ -1,46 +1 @@ -from typing import List - -from datahub.utilities.urns.error import InvalidUrnError -from datahub.utilities.urns.urn import Urn - - -class NotebookUrn(Urn): - """ - expected dataset urn format: urn:li:notebook:(,). 
example: "urn:li:notebook:(querybook,1234)" - """ - - ENTITY_TYPE: str = "notebook" - - def __init__( - self, entity_type: str, entity_id: List[str], domain: str = Urn.LI_DOMAIN - ): - super().__init__(entity_type, entity_id, domain) - - @classmethod - def create_from_string(cls, urn_str: str) -> "NotebookUrn": - urn: Urn = super().create_from_string(urn_str) - return cls(urn.get_type(), urn.get_entity_id(), urn.get_domain()) - - @classmethod - def create_from_ids(cls, platform_id: str, notebook_id: str) -> "NotebookUrn": - return cls(NotebookUrn.ENTITY_TYPE, [platform_id, notebook_id]) - - @staticmethod - def _validate_entity_type(entity_type: str) -> None: - if entity_type != NotebookUrn.ENTITY_TYPE: - raise InvalidUrnError( - f"Entity type should be {NotebookUrn.ENTITY_TYPE} but found {entity_type}" - ) - - @staticmethod - def _validate_entity_id(entity_id: List[str]) -> None: - if len(entity_id) != 2: - raise InvalidUrnError( - f"Expect 2 parts in entity id, but found{len(entity_id)}" - ) - - def get_platform_id(self) -> str: - return self.get_entity_id()[0] - - def get_notebook_id(self) -> str: - return self.get_entity_id()[1] +from datahub.metadata.urns import NotebookUrn # noqa: F401 diff --git a/metadata-ingestion/src/datahub/utilities/urns/tag_urn.py b/metadata-ingestion/src/datahub/utilities/urns/tag_urn.py index e2baeea45e8075..0ac632ee40a015 100644 --- a/metadata-ingestion/src/datahub/utilities/urns/tag_urn.py +++ b/metadata-ingestion/src/datahub/utilities/urns/tag_urn.py @@ -1,40 +1 @@ -from typing import List - -from datahub.utilities.urns.error import InvalidUrnError -from datahub.utilities.urns.urn import Urn - - -class TagUrn(Urn): - """ - expected tag urn format: urn:li:tag:. example: "urn:li:tag:product" - """ - - ENTITY_TYPE: str = "tag" - - def __init__( - self, entity_type: str, entity_id: List[str], tag: str = Urn.LI_DOMAIN - ): - super().__init__(entity_type, entity_id, tag) - - @classmethod - def create_from_string(cls, urn_str: str) -> "TagUrn": - urn: Urn = super().create_from_string(urn_str) - return cls(urn.get_type(), urn.get_entity_id(), urn.get_domain()) - - @classmethod - def create_from_id(cls, tag_id: str) -> "TagUrn": - return cls(TagUrn.ENTITY_TYPE, [tag_id]) - - @staticmethod - def _validate_entity_type(entity_type: str) -> None: - if entity_type != TagUrn.ENTITY_TYPE: - raise InvalidUrnError( - f"Entity type should be {TagUrn.ENTITY_TYPE} but found {entity_type}" - ) - - @staticmethod - def _validate_entity_id(entity_id: List[str]) -> None: - if len(entity_id) != 1: - raise InvalidUrnError( - f"Expect 1 part in entity id, but found{len(entity_id)}" - ) +from datahub.metadata.urns import TagUrn # noqa: F401 diff --git a/metadata-ingestion/src/datahub/utilities/urns/urn.py b/metadata-ingestion/src/datahub/utilities/urns/urn.py index db6898d55ad2b3..2e5cebfd0e8f55 100644 --- a/metadata-ingestion/src/datahub/utilities/urns/urn.py +++ b/metadata-ingestion/src/datahub/utilities/urns/urn.py @@ -1,167 +1,6 @@ -import urllib.parse -from typing import List - -from datahub.utilities.urns.error import InvalidUrnError +from datahub.metadata.urns import Urn # noqa: F401 def guess_entity_type(urn: str) -> str: assert urn.startswith("urn:li:"), "urns must start with urn:li:" return urn.split(":")[2] - - -class Urn: - """ - URNs are Globally Unique Identifiers (GUID) used to represent an entity. - It will be in format of urn::: - """ - - URN_PREFIX: str = "urn" - # all the Datahub urn use li domain for now. 
- LI_DOMAIN: str = "li" - - _entity_type: str - _domain: str - _entity_id: List[str] - - def __init__( - self, entity_type: str, entity_id: List[str], urn_domain: str = LI_DOMAIN - ): - if not entity_id: - raise InvalidUrnError("Empty entity id.") - self._validate_entity_type(entity_type) - self._validate_entity_id(entity_id) - self._entity_type = entity_type - self._domain = urn_domain - self._entity_id = entity_id - - @classmethod - def create_from_string(cls, urn_str: str) -> "Urn": - """ - Create a Urn from the its string representation - :param urn_str: the string representation of the Urn - :return: Urn of the given string representation - :raises InvalidUrnError if the string representation is in invalid format - """ - - # expect urn string in format of urn::: - cls.validate(urn_str) - parts: List[str] = urn_str.split(":", 3) - - return cls(parts[2], cls._get_entity_id_from_str(parts[3]), parts[1]) - - @classmethod - def validate(cls, urn_str: str) -> None: - """ - Validate if a string is in valid Urn format - :param urn_str: to be validated urn string - :raises InvalidUrnError if the string representation is in invalid format - """ - parts: List[str] = urn_str.split(":", 3) - if len(parts) != 4: - raise InvalidUrnError( - f"Invalid urn string: {urn_str}. Expect 4 parts from urn string but found {len(parts)}" - ) - - if "" in parts: - raise InvalidUrnError( - f"Invalid urn string: {urn_str}. There should not be empty parts in urn string." - ) - - if parts[0] != Urn.URN_PREFIX: - raise InvalidUrnError( - f'Invalid urn string: {urn_str}. Expect urn starting with "urn" but found {parts[0]}' - ) - - if "" in cls._get_entity_id_from_str(parts[3]): - raise InvalidUrnError( - f"Invalid entity id in urn string: {urn_str}. There should not be empty parts in entity id." - ) - - cls._validate_entity_type(parts[2]) - cls._validate_entity_id(cls._get_entity_id_from_str(parts[3])) - - @staticmethod - def url_encode(urn: str) -> str: - # safe='' encodes '/' as '%2F' - return urllib.parse.quote(urn, safe="") - - def get_type(self) -> str: - return self._entity_type - - def get_entity_id(self) -> List[str]: - return self._entity_id - - def get_entity_id_as_string(self) -> str: - """ - :return: string representation of the entity ids. If there are more than one part in the entity id part, it will - return in this format (,,...) 
- """ - return self._entity_id_to_string() - - def get_domain(self) -> str: - return self._domain - - @staticmethod - def _get_entity_id_from_str(entity_id: str) -> List[str]: - if not (entity_id.startswith("(") and entity_id.endswith(")")): - return [entity_id] - - parts = [] - start_paren_count = 1 - part_start = 1 - for i in range(1, len(entity_id)): - c = entity_id[i] - if c == "(": - start_paren_count += 1 - elif c == ")": - start_paren_count -= 1 - if start_paren_count < 0: - raise InvalidUrnError(f"{entity_id}, mismatched paren nesting") - elif c == ",": - if start_paren_count != 1: - continue - - if i - part_start <= 0: - raise InvalidUrnError(f"{entity_id}, empty part disallowed") - parts.append(entity_id[part_start:i]) - part_start = i + 1 - - if start_paren_count != 0: - raise InvalidUrnError(f"{entity_id}, mismatched paren nesting") - - parts.append(entity_id[part_start:-1]) - - return parts - - @staticmethod - def _validate_entity_type(entity_type: str) -> None: - pass - - @staticmethod - def _validate_entity_id(entity_id: List[str]) -> None: - pass - - def __str__(self) -> str: - return f"{self.URN_PREFIX}:{self._domain}:{self._entity_type}:{self._entity_id_to_string()}" - - def _entity_id_to_string(self) -> str: - if len(self._entity_id) == 1: - return self._entity_id[0] - result = "" - for part in self._entity_id: - result = result + str(part) + "," - return f"({result[:-1]})" - - def __hash__(self) -> int: - return hash((self._domain, self._entity_type) + tuple(self._entity_id)) - - def __eq__(self, other: object) -> bool: - return ( - ( - self._entity_id == other._entity_id - and self._domain == other._domain - and self._entity_type == other._entity_type - ) - if isinstance(other, Urn) - else False - ) diff --git a/metadata-ingestion/src/datahub/utilities/urns/urn_iter.py b/metadata-ingestion/src/datahub/utilities/urns/urn_iter.py index 169a4ac3649a33..4f228494f416b8 100644 --- a/metadata-ingestion/src/datahub/utilities/urns/urn_iter.py +++ b/metadata-ingestion/src/datahub/utilities/urns/urn_iter.py @@ -131,9 +131,11 @@ def _modify_at_path( def _lowercase_dataset_urn(dataset_urn: str) -> str: - cur_urn = DatasetUrn.create_from_string(dataset_urn) - cur_urn._entity_id[1] = cur_urn._entity_id[1].lower() - return str(cur_urn) + cur_urn = DatasetUrn.from_string(dataset_urn) + new_urn = DatasetUrn( + platform=cur_urn.platform, name=cur_urn.name.lower(), env=cur_urn.env + ) + return str(new_urn) def lowercase_dataset_urns( @@ -149,7 +151,7 @@ def modify_urn(urn: str) -> str: return _lowercase_dataset_urn(urn) elif guess_entity_type(urn) == "schemaField": cur_urn = Urn.create_from_string(urn) - cur_urn._entity_id[0] = _lowercase_dataset_urn(cur_urn._entity_id[0]) + cur_urn._entity_ids[0] = _lowercase_dataset_urn(cur_urn._entity_ids[0]) return str(cur_urn) return urn diff --git a/metadata-ingestion/tests/unit/stateful_ingestion/state/test_checkpoint.py b/metadata-ingestion/tests/unit/stateful_ingestion/state/test_checkpoint.py index 712ae2066b728d..ecea3183393453 100644 --- a/metadata-ingestion/tests/unit/stateful_ingestion/state/test_checkpoint.py +++ b/metadata-ingestion/tests/unit/stateful_ingestion/state/test_checkpoint.py @@ -4,7 +4,6 @@ import pydantic import pytest -from datahub.emitter.mce_builder import make_dataset_urn from datahub.ingestion.source.state.checkpoint import Checkpoint, CheckpointStateBase from datahub.ingestion.source.state.sql_common_state import ( BaseSQLAlchemyCheckpointState, @@ -59,12 +58,15 @@ def _assert_checkpoint_deserialization( def 
_make_sql_alchemy_checkpoint_state() -> BaseSQLAlchemyCheckpointState: + # Note that the urns here purposely use a lowercase env, even though it's + # technically incorrect. This is purely for backwards compatibility testing, but + # all existing code uses correctly formed envs. base_sql_alchemy_checkpoint_state_obj = BaseSQLAlchemyCheckpointState() base_sql_alchemy_checkpoint_state_obj.add_checkpoint_urn( - type="table", urn=make_dataset_urn("mysql", "db1.t1", "prod") + type="table", urn="urn:li:dataset:(urn:li:dataPlatform:mysql,db1.t1,prod)" ) base_sql_alchemy_checkpoint_state_obj.add_checkpoint_urn( - type="view", urn=make_dataset_urn("mysql", "db1.v1", "prod") + type="view", urn="urn:li:dataset:(urn:li:dataPlatform:mysql,db1.v1,prod)" ) return base_sql_alchemy_checkpoint_state_obj diff --git a/metadata-ingestion/tests/unit/stateful_ingestion/test_kafka_state.py b/metadata-ingestion/tests/unit/stateful_ingestion/test_kafka_state.py index f4517ba2df9c93..3b0e4e31d4b4a2 100644 --- a/metadata-ingestion/tests/unit/stateful_ingestion/test_kafka_state.py +++ b/metadata-ingestion/tests/unit/stateful_ingestion/test_kafka_state.py @@ -25,6 +25,6 @@ def test_kafka_state_migration() -> None: } ) assert state.urns == [ - "urn:li:dataset:(urn:li:dataPlatform:kafka,test_topic1,test)", + "urn:li:dataset:(urn:li:dataPlatform:kafka,test_topic1,TEST)", "urn:li:dataset:(urn:li:dataPlatform:kafka,topic_2,DEV)", ] diff --git a/metadata-ingestion/tests/unit/test_urn.py b/metadata-ingestion/tests/unit/test_urn.py deleted file mode 100644 index 8bab01e437fdbd..00000000000000 --- a/metadata-ingestion/tests/unit/test_urn.py +++ /dev/null @@ -1,45 +0,0 @@ -import unittest - -from datahub.utilities.urns.error import InvalidUrnError -from datahub.utilities.urns.urn import Urn - - -class TestUrn(unittest.TestCase): - def test_parse_urn(self) -> None: - simple_urn_str = "urn:li:dataPlatform:abc" - urn = Urn.create_from_string(simple_urn_str) - assert urn.get_entity_id_as_string() == "abc" - assert urn.get_entity_id() == ["abc"] - assert urn.get_type() == "dataPlatform" - assert urn.get_domain() == "li" - assert urn.__str__() == simple_urn_str - assert urn == Urn("dataPlatform", ["abc"]) - - complex_urn_str = "urn:li:dataset:(urn:li:dataPlatform:abc,def,prod)" - urn = Urn.create_from_string(complex_urn_str) - assert urn.get_entity_id_as_string() == "(urn:li:dataPlatform:abc,def,prod)" - assert urn.get_entity_id() == ["urn:li:dataPlatform:abc", "def", "prod"] - assert urn.get_type() == "dataset" - assert urn.__str__() == "urn:li:dataset:(urn:li:dataPlatform:abc,def,prod)" - - def test_url_encode_urn(self) -> None: - urn_with_slash: Urn = Urn.create_from_string( - "urn:li:dataset:(urn:li:dataPlatform:abc,def/ghi,prod)" - ) - assert ( - Urn.url_encode(str(urn_with_slash)) - == "urn%3Ali%3Adataset%3A%28urn%3Ali%3AdataPlatform%3Aabc%2Cdef%2Fghi%2Cprod%29" - ) - - def test_invalid_urn(self) -> None: - with self.assertRaises(InvalidUrnError): - Urn.create_from_string("urn:li:abc") - - with self.assertRaises(InvalidUrnError): - Urn.create_from_string("urn:li:abc:") - - with self.assertRaises(InvalidUrnError): - Urn.create_from_string("urn:li:abc:()") - - with self.assertRaises(InvalidUrnError): - Urn.create_from_string("urn:li:abc:(abc,)") diff --git a/metadata-ingestion/tests/unit/test_corp_group_urn.py b/metadata-ingestion/tests/unit/urns/test_corp_group_urn.py similarity index 87% rename from metadata-ingestion/tests/unit/test_corp_group_urn.py rename to metadata-ingestion/tests/unit/urns/test_corp_group_urn.py index 
9cfd925ef34eb5..1897a0e8686f09 100644 --- a/metadata-ingestion/tests/unit/test_corp_group_urn.py +++ b/metadata-ingestion/tests/unit/urns/test_corp_group_urn.py @@ -1,9 +1,12 @@ import unittest +import pytest + from datahub.utilities.urns.corp_group_urn import CorpGroupUrn from datahub.utilities.urns.error import InvalidUrnError +@pytest.mark.filterwarnings("ignore::DeprecationWarning") class TestCorpGroupUrn(unittest.TestCase): def test_parse_urn(self) -> None: corp_group_urn_str = "urn:li:corpGroup:abc" @@ -12,7 +15,7 @@ def test_parse_urn(self) -> None: assert corp_group_urn.get_entity_id() == ["abc"] assert str(corp_group_urn) == corp_group_urn_str - assert corp_group_urn == CorpGroupUrn("corpGroup", ["abc"]) + assert corp_group_urn == CorpGroupUrn(name="abc") assert corp_group_urn == CorpGroupUrn.create_from_id("abc") def test_invalid_urn(self) -> None: diff --git a/metadata-ingestion/tests/unit/test_corpuser_urn.py b/metadata-ingestion/tests/unit/urns/test_corpuser_urn.py similarity index 88% rename from metadata-ingestion/tests/unit/test_corpuser_urn.py rename to metadata-ingestion/tests/unit/urns/test_corpuser_urn.py index 40b83214a785b1..7a2a4f4ff4493c 100644 --- a/metadata-ingestion/tests/unit/test_corpuser_urn.py +++ b/metadata-ingestion/tests/unit/urns/test_corpuser_urn.py @@ -1,9 +1,12 @@ import unittest +import pytest + from datahub.utilities.urns.corpuser_urn import CorpuserUrn from datahub.utilities.urns.error import InvalidUrnError +@pytest.mark.filterwarnings("ignore::DeprecationWarning") class TestCorpuserUrn(unittest.TestCase): def test_parse_urn(self) -> None: corpuser_urn_str = "urn:li:corpuser:abc" @@ -12,7 +15,7 @@ def test_parse_urn(self) -> None: assert corpuser_urn.get_entity_id() == ["abc"] assert str(corpuser_urn) == corpuser_urn_str - assert corpuser_urn == CorpuserUrn("corpuser", ["abc"]) + assert corpuser_urn == CorpuserUrn("abc") assert corpuser_urn == CorpuserUrn.create_from_id("abc") def test_invalid_urn(self) -> None: diff --git a/metadata-ingestion/tests/unit/test_data_flow_urn.py b/metadata-ingestion/tests/unit/urns/test_data_flow_urn.py similarity index 77% rename from metadata-ingestion/tests/unit/test_data_flow_urn.py rename to metadata-ingestion/tests/unit/urns/test_data_flow_urn.py index 8b739d39abf671..524411121d418b 100644 --- a/metadata-ingestion/tests/unit/test_data_flow_urn.py +++ b/metadata-ingestion/tests/unit/urns/test_data_flow_urn.py @@ -1,9 +1,12 @@ import unittest +import pytest + from datahub.utilities.urns.data_flow_urn import DataFlowUrn from datahub.utilities.urns.error import InvalidUrnError +@pytest.mark.filterwarnings("ignore::DeprecationWarning") class TestDataFlowUrn(unittest.TestCase): def test_parse_urn(self) -> None: data_flow_urn_str = "urn:li:dataFlow:(airflow,def,prod)" @@ -12,7 +15,7 @@ def test_parse_urn(self) -> None: assert data_flow_urn.get_flow_id() == "def" assert data_flow_urn.get_env() == "prod" assert data_flow_urn.__str__() == "urn:li:dataFlow:(airflow,def,prod)" - assert data_flow_urn == DataFlowUrn("dataFlow", ["airflow", "def", "prod"]) + assert data_flow_urn == DataFlowUrn("airflow", "def", "prod") def test_invalid_urn(self) -> None: with self.assertRaises(InvalidUrnError): @@ -20,8 +23,3 @@ def test_invalid_urn(self) -> None: with self.assertRaises(InvalidUrnError): DataFlowUrn.create_from_string("urn:li:dataFlow:(airflow,flow_id)") - - with self.assertRaises(InvalidUrnError): - DataFlowUrn.create_from_string( - "urn:li:dataFlow:(airflow,flow_id,invalidEnv)" - ) diff --git 
a/metadata-ingestion/tests/unit/test_data_job_urn.py b/metadata-ingestion/tests/unit/urns/test_data_job_urn.py similarity index 90% rename from metadata-ingestion/tests/unit/test_data_job_urn.py rename to metadata-ingestion/tests/unit/urns/test_data_job_urn.py index 0cd9084a515221..bf039cd2a91f96 100644 --- a/metadata-ingestion/tests/unit/test_data_job_urn.py +++ b/metadata-ingestion/tests/unit/urns/test_data_job_urn.py @@ -1,10 +1,13 @@ import unittest +import pytest + from datahub.utilities.urns.data_flow_urn import DataFlowUrn from datahub.utilities.urns.data_job_urn import DataJobUrn from datahub.utilities.urns.error import InvalidUrnError +@pytest.mark.filterwarnings("ignore::DeprecationWarning") class TestDataJobUrn(unittest.TestCase): def test_parse_urn(self) -> None: data_job_urn_str = ( @@ -17,7 +20,7 @@ def test_parse_urn(self) -> None: assert data_job_urn.get_job_id() == "job_id" assert data_job_urn.__str__() == data_job_urn_str assert data_job_urn == DataJobUrn( - "dataJob", ["urn:li:dataFlow:(airflow,flow_id,prod)", "job_id"] + "urn:li:dataFlow:(airflow,flow_id,prod)", "job_id" ) def test_invalid_urn(self) -> None: diff --git a/metadata-ingestion/tests/unit/test_data_process_instance_urn.py b/metadata-ingestion/tests/unit/urns/test_data_process_instance_urn.py similarity index 90% rename from metadata-ingestion/tests/unit/test_data_process_instance_urn.py rename to metadata-ingestion/tests/unit/urns/test_data_process_instance_urn.py index e6cd201e12c7a6..a86f8dd99416ff 100644 --- a/metadata-ingestion/tests/unit/test_data_process_instance_urn.py +++ b/metadata-ingestion/tests/unit/urns/test_data_process_instance_urn.py @@ -1,10 +1,13 @@ import unittest +import pytest + from datahub.utilities.urns.data_process_instance_urn import DataProcessInstanceUrn from datahub.utilities.urns.error import InvalidUrnError -class TestDomainUrn(unittest.TestCase): +@pytest.mark.filterwarnings("ignore::DeprecationWarning") +class TestDataProcessInstanceUrn(unittest.TestCase): def test_parse_urn(self) -> None: dataprocessinstance_urn_str = "urn:li:dataProcessInstance:abc" dataprocessinstance_urn = DataProcessInstanceUrn.create_from_string( @@ -14,9 +17,7 @@ def test_parse_urn(self) -> None: assert dataprocessinstance_urn.get_entity_id() == ["abc"] assert str(dataprocessinstance_urn) == dataprocessinstance_urn_str - assert dataprocessinstance_urn == DataProcessInstanceUrn( - "dataProcessInstance", ["abc"] - ) + assert dataprocessinstance_urn == DataProcessInstanceUrn("abc") assert dataprocessinstance_urn == DataProcessInstanceUrn.create_from_id("abc") assert "abc" == dataprocessinstance_urn.get_dataprocessinstance_id() diff --git a/metadata-ingestion/tests/unit/test_dataset_urn.py b/metadata-ingestion/tests/unit/urns/test_dataset_urn.py similarity index 81% rename from metadata-ingestion/tests/unit/test_dataset_urn.py rename to metadata-ingestion/tests/unit/urns/test_dataset_urn.py index e1e37409d8a635..53065143a6ae4f 100644 --- a/metadata-ingestion/tests/unit/test_dataset_urn.py +++ b/metadata-ingestion/tests/unit/urns/test_dataset_urn.py @@ -1,26 +1,25 @@ import unittest +import pytest + from datahub.utilities.urns.data_platform_urn import DataPlatformUrn from datahub.utilities.urns.dataset_urn import DatasetUrn from datahub.utilities.urns.error import InvalidUrnError +@pytest.mark.filterwarnings("ignore::DeprecationWarning") class TestDatasetUrn(unittest.TestCase): def test_parse_urn(self) -> None: - dataset_urn_str = "urn:li:dataset:(urn:li:dataPlatform:abc,def,prod)" + dataset_urn_str = 
"urn:li:dataset:(urn:li:dataPlatform:abc,def,PROD)" dataset_urn = DatasetUrn.create_from_string(dataset_urn_str) assert ( dataset_urn.get_data_platform_urn() == DataPlatformUrn.create_from_string("urn:li:dataPlatform:abc") ) assert dataset_urn.get_dataset_name() == "def" - assert dataset_urn.get_env() == "prod" - assert ( - dataset_urn.__str__() == "urn:li:dataset:(urn:li:dataPlatform:abc,def,prod)" - ) - assert dataset_urn == DatasetUrn( - "dataset", ["urn:li:dataPlatform:abc", "def", "prod"] - ) + assert dataset_urn.get_env() == "PROD" + assert dataset_urn.__str__() == dataset_urn_str + assert dataset_urn == DatasetUrn("urn:li:dataPlatform:abc", "def", "prod") def test_invalid_urn(self) -> None: with self.assertRaises(InvalidUrnError): diff --git a/metadata-ingestion/tests/unit/test_domain_urn.py b/metadata-ingestion/tests/unit/urns/test_domain_urn.py similarity index 87% rename from metadata-ingestion/tests/unit/test_domain_urn.py rename to metadata-ingestion/tests/unit/urns/test_domain_urn.py index e5e4dffc525cda..843a5bf40f5c63 100644 --- a/metadata-ingestion/tests/unit/test_domain_urn.py +++ b/metadata-ingestion/tests/unit/urns/test_domain_urn.py @@ -1,9 +1,12 @@ import unittest +import pytest + from datahub.utilities.urns.domain_urn import DomainUrn from datahub.utilities.urns.error import InvalidUrnError +@pytest.mark.filterwarnings("ignore::DeprecationWarning") class TestDomainUrn(unittest.TestCase): def test_parse_urn(self) -> None: domain_urn_str = "urn:li:domain:abc" @@ -12,7 +15,7 @@ def test_parse_urn(self) -> None: assert domain_urn.get_entity_id() == ["abc"] assert str(domain_urn) == domain_urn_str - assert domain_urn == DomainUrn("domain", ["abc"]) + assert domain_urn == DomainUrn("abc") assert domain_urn == DomainUrn.create_from_id("abc") def test_invalid_urn(self) -> None: diff --git a/metadata-ingestion/tests/unit/test_notebook_urn.py b/metadata-ingestion/tests/unit/urns/test_notebook_urn.py similarity index 86% rename from metadata-ingestion/tests/unit/test_notebook_urn.py rename to metadata-ingestion/tests/unit/urns/test_notebook_urn.py index 6b245e29ceae95..3ec580f02142b7 100644 --- a/metadata-ingestion/tests/unit/test_notebook_urn.py +++ b/metadata-ingestion/tests/unit/urns/test_notebook_urn.py @@ -1,9 +1,12 @@ import unittest +import pytest + from datahub.utilities.urns.error import InvalidUrnError from datahub.utilities.urns.notebook_urn import NotebookUrn +@pytest.mark.filterwarnings("ignore::DeprecationWarning") class TestNotebookUrn(unittest.TestCase): def test_parse_urn(self) -> None: notebook_urn_str = "urn:li:notebook:(querybook,123)" @@ -12,7 +15,7 @@ def test_parse_urn(self) -> None: assert notebook_urn.get_notebook_id() == "123" assert str(notebook_urn) == notebook_urn_str - assert notebook_urn == NotebookUrn("notebook", ["querybook", "123"]) + assert notebook_urn == NotebookUrn("querybook", "123") def test_invalid_urn(self) -> None: with self.assertRaises(InvalidUrnError): diff --git a/metadata-ingestion/tests/unit/test_tag_urn.py b/metadata-ingestion/tests/unit/urns/test_tag_urn.py similarity index 87% rename from metadata-ingestion/tests/unit/test_tag_urn.py rename to metadata-ingestion/tests/unit/urns/test_tag_urn.py index 630420dc1263f1..fa3664bcc02180 100644 --- a/metadata-ingestion/tests/unit/test_tag_urn.py +++ b/metadata-ingestion/tests/unit/urns/test_tag_urn.py @@ -1,9 +1,12 @@ import unittest +import pytest + from datahub.utilities.urns.error import InvalidUrnError from datahub.utilities.urns.tag_urn import TagUrn 
+@pytest.mark.filterwarnings("ignore::DeprecationWarning") class TestTagUrn(unittest.TestCase): def test_parse_urn(self) -> None: tag_urn_str = "urn:li:tag:abc" @@ -12,7 +15,7 @@ def test_parse_urn(self) -> None: assert tag_urn.get_entity_id() == ["abc"] assert str(tag_urn) == tag_urn_str - assert tag_urn == TagUrn("tag", ["abc"]) + assert tag_urn == TagUrn("abc") assert tag_urn == TagUrn.create_from_id("abc") def test_invalid_urn(self) -> None: diff --git a/metadata-ingestion/tests/unit/urns/test_urn.py b/metadata-ingestion/tests/unit/urns/test_urn.py new file mode 100644 index 00000000000000..1bf48082fec8c9 --- /dev/null +++ b/metadata-ingestion/tests/unit/urns/test_urn.py @@ -0,0 +1,56 @@ +import pytest + +from datahub.metadata.urns import DatasetUrn, Urn +from datahub.utilities.urns.error import InvalidUrnError + +pytestmark = pytest.mark.filterwarnings("ignore::DeprecationWarning") + + +def test_parse_urn() -> None: + simple_urn_str = "urn:li:dataPlatform:abc" + urn = Urn.create_from_string(simple_urn_str) + assert urn.get_entity_id_as_string() == "abc" + assert urn.get_entity_id() == ["abc"] + assert urn.get_type() == "dataPlatform" + assert urn.get_domain() == "li" + assert urn.__str__() == simple_urn_str + assert urn == Urn("dataPlatform", ["abc"]) + + complex_urn_str = "urn:li:dataset:(urn:li:dataPlatform:abc,def,prod)" + urn = Urn.create_from_string(complex_urn_str) + assert urn.get_entity_id_as_string() == "(urn:li:dataPlatform:abc,def,prod)" + assert urn.get_entity_id() == ["urn:li:dataPlatform:abc", "def", "prod"] + assert urn.get_type() == "dataset" + assert urn.__str__() == "urn:li:dataset:(urn:li:dataPlatform:abc,def,prod)" + + +def test_url_encode_urn() -> None: + urn_with_slash: Urn = Urn.create_from_string( + "urn:li:dataset:(urn:li:dataPlatform:abc,def/ghi,prod)" + ) + assert ( + Urn.url_encode(str(urn_with_slash)) + == "urn%3Ali%3Adataset%3A%28urn%3Ali%3AdataPlatform%3Aabc%2Cdef%2Fghi%2Cprod%29" + ) + + +def test_invalid_urn() -> None: + with pytest.raises(InvalidUrnError): + Urn.create_from_string("urn:li:abc") + + with pytest.raises(InvalidUrnError): + Urn.create_from_string("urn:li:abc:") + + with pytest.raises(InvalidUrnError): + Urn.create_from_string("urn:li:abc:()") + + with pytest.raises(InvalidUrnError): + Urn.create_from_string("urn:li:abc:(abc,)") + + +def test_urn_type_dispatch() -> None: + urn = Urn.from_string("urn:li:dataset:(urn:li:dataPlatform:abc,def,prod)") + assert isinstance(urn, DatasetUrn) + + with pytest.raises(InvalidUrnError, match="Passed an urn of type corpuser"): + DatasetUrn.from_string("urn:li:corpuser:foo") diff --git a/metadata-models/src/main/resources/entity-registry.yml b/metadata-models/src/main/resources/entity-registry.yml index a5296d074093be..1ba238b737236f 100644 --- a/metadata-models/src/main/resources/entity-registry.yml +++ b/metadata-models/src/main/resources/entity-registry.yml @@ -400,7 +400,7 @@ entities: - dataHubUpgradeRequest - dataHubUpgradeResult - name: inviteToken - category: core + category: internal keyAspect: inviteTokenKey aspects: - inviteToken @@ -425,7 +425,7 @@ entities: aspects: - postInfo - name: dataHubStepState - category: core + category: internal keyAspect: dataHubStepStateKey aspects: - dataHubStepStateProperties From a8476ee657a3c116b65de8cd14a731acff164503 Mon Sep 17 00:00:00 2001 From: Harshal Sheth Date: Thu, 30 Nov 2023 18:34:48 -0500 Subject: [PATCH 05/23] fix(airflow): support inlet datajobs correctly in v1 plugin (#9331) --- docs/lineage/airflow.md | 42 ++++++++-- .../datahub_listener.py | 4 
+ .../datahub_plugin_v22.py | 43 ++++++---- .../integration/goldens/v1_basic_iolets.json | 64 ++++----------- .../integration/goldens/v1_simple_dag.json | 78 ++++++------------- .../integration/goldens/v2_basic_iolets.json | 18 ++--- .../v2_basic_iolets_no_dag_listener.json | 14 ++-- .../integration/goldens/v2_simple_dag.json | 34 ++++---- .../v2_simple_dag_no_dag_listener.json | 28 +++---- .../goldens/v2_snowflake_operator.json | 14 ++-- .../goldens/v2_sqlite_operator.json | 62 +++++++-------- .../v2_sqlite_operator_no_dag_listener.json | 70 ++++++++--------- .../tests/integration/test_plugin.py | 52 ++++++++++--- .../datahub/api/entities/datajob/datajob.py | 3 +- 14 files changed, 269 insertions(+), 257 deletions(-) diff --git a/docs/lineage/airflow.md b/docs/lineage/airflow.md index 32da518d6c04c7..8fd38f560bfbb5 100644 --- a/docs/lineage/airflow.md +++ b/docs/lineage/airflow.md @@ -8,7 +8,7 @@ If you're looking to schedule DataHub ingestion using Airflow, see the guide on The DataHub Airflow plugin supports: -- Automatic column-level lineage extraction from various operators e.g. `SqlOperator`s (including `MySqlOperator`, `PostgresOperator`, `SnowflakeOperator`, and more), `S3FileTransformOperator`, and a few others. +- Automatic column-level lineage extraction from various operators e.g. SQL operators (including `MySqlOperator`, `PostgresOperator`, `SnowflakeOperator`, and more), `S3FileTransformOperator`, and more. - Airflow DAG and tasks, including properties, ownership, and tags. - Task run information, including task successes and failures. - Manual lineage annotations using `inlets` and `outlets` on Airflow operators. @@ -76,12 +76,6 @@ enabled = True # default | log_level | _no change_ | [debug] Set the log level for the plugin. | | debug_emitter | false | [debug] If true, the plugin will log the emitted events. | -### Automatic lineage extraction - -To automatically extract lineage information, the v2 plugin builds on top of Airflow's built-in [OpenLineage extractors](https://openlineage.io/docs/integrations/airflow/default-extractors). - -The SQL-related extractors have been updated to use DataHub's SQL parser, which is more robust than the built-in one and uses DataHub's metadata information to generate column-level lineage. We discussed the DataHub SQL parser, including why schema-aware parsing works better and how it performs on benchmarks, during the [June 2023 community town hall](https://youtu.be/1QVcUmRQK5E?si=U27zygR7Gi_KdkzE&t=2309). - ## DataHub Plugin v1 ### Installation @@ -152,6 +146,40 @@ conn_id = datahub_rest_default # or datahub_kafka_default Emitting DataHub ... ``` +## Automatic lineage extraction + +Only the v2 plugin supports automatic lineage extraction. If you're using the v1 plugin, you must use manual lineage annotation or emit lineage directly. + +To automatically extract lineage information, the v2 plugin builds on top of Airflow's built-in [OpenLineage extractors](https://openlineage.io/docs/integrations/airflow/default-extractors). +As such, we support a superset of the default operators that Airflow/OpenLineage supports. + +The SQL-related extractors have been updated to use [DataHub's SQL lineage parser](https://blog.datahubproject.io/extracting-column-level-lineage-from-sql-779b8ce17567), which is more robust than the built-in one and uses DataHub's metadata information to generate column-level lineage. + +Supported operators: + +- `SQLExecuteQueryOperator`, including any subclasses. 
Note that in newer versions of Airflow (generally Airflow 2.5+), most SQL operators inherit from this class. +- `AthenaOperator` and `AWSAthenaOperator` +- `BigQueryOperator` and `BigQueryExecuteQueryOperator` +- `MySqlOperator` +- `PostgresOperator` +- `RedshiftSQLOperator` +- `SnowflakeOperator` and `SnowflakeOperatorAsync` +- `SqliteOperator` +- `TrinoOperator` + + + ## Manual Lineage Annotation ### Using `inlets` and `outlets` diff --git a/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/datahub_listener.py b/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/datahub_listener.py index c39eef26356581..debc91700d3db7 100644 --- a/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/datahub_listener.py +++ b/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/datahub_listener.py @@ -296,6 +296,7 @@ def _extract_lineage( logger.debug("Merging start datajob into finish datajob") datajob.inlets.extend(original_datajob.inlets) datajob.outlets.extend(original_datajob.outlets) + datajob.upstream_urns.extend(original_datajob.upstream_urns) datajob.fine_grained_lineages.extend(original_datajob.fine_grained_lineages) for k, v in original_datajob.properties.items(): @@ -304,6 +305,9 @@ def _extract_lineage( # Deduplicate inlets/outlets. datajob.inlets = list(sorted(set(datajob.inlets), key=lambda x: str(x))) datajob.outlets = list(sorted(set(datajob.outlets), key=lambda x: str(x))) + datajob.upstream_urns = list( + sorted(set(datajob.upstream_urns), key=lambda x: str(x)) + ) # Write all other OL facets as DataHub properties. if task_metadata: diff --git a/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/datahub_plugin_v22.py b/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/datahub_plugin_v22.py index f9a2119f51e329..51a4151bc8207e 100644 --- a/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/datahub_plugin_v22.py +++ b/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/datahub_plugin_v22.py @@ -18,6 +18,10 @@ ) from datahub_airflow_plugin._config import get_lineage_config from datahub_airflow_plugin.client.airflow_generator import AirflowGenerator +from datahub_airflow_plugin.entities import ( + entities_to_datajob_urn_list, + entities_to_dataset_urn_list, +) from datahub_airflow_plugin.hooks.datahub import DatahubGenericHook from datahub_airflow_plugin.lineage.datahub import DatahubLineageConfig @@ -94,7 +98,8 @@ def datahub_task_status_callback(context, status): # This code is from the original airflow lineage code -> # https://github.com/apache/airflow/blob/main/airflow/lineage/__init__.py - inlets = get_task_inlets_advanced(task, context) + task_inlets = get_task_inlets_advanced(task, context) + task_outlets = get_task_outlets(task) emitter = ( DatahubGenericHook(config.datahub_conn_id).get_underlying_hook().make_emitter() @@ -116,13 +121,15 @@ def datahub_task_status_callback(context, status): capture_tags=config.capture_tags_info, capture_owner=config.capture_ownership_info, ) - - for inlet in inlets: - datajob.inlets.append(inlet.urn) - - task_outlets = get_task_outlets(task) - for outlet in task_outlets: - datajob.outlets.append(outlet.urn) + datajob.inlets.extend( + entities_to_dataset_urn_list([let.urn for let in task_inlets]) + ) + datajob.outlets.extend( + entities_to_dataset_urn_list([let.urn for let in task_outlets]) + ) + datajob.upstream_urns.extend( + entities_to_datajob_urn_list([let.urn for let in task_inlets]) + ) 
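+    # Inlets may mix datasets and data jobs: dataset urns are recorded as
+    # dataset lineage, while datajob urns are recorded as upstream job
+    # dependencies.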
task.log.info(f"Emitting Datahub Datajob: {datajob}") datajob.emit(emitter, callback=_make_emit_callback(task.log)) @@ -169,7 +176,8 @@ def datahub_pre_execution(context): # This code is from the original airflow lineage code -> # https://github.com/apache/airflow/blob/main/airflow/lineage/__init__.py - inlets = get_task_inlets_advanced(task, context) + task_inlets = get_task_inlets_advanced(task, context) + task_outlets = get_task_outlets(task) datajob = AirflowGenerator.generate_datajob( cluster=config.cluster, @@ -178,14 +186,15 @@ def datahub_pre_execution(context): capture_tags=config.capture_tags_info, capture_owner=config.capture_ownership_info, ) - - for inlet in inlets: - datajob.inlets.append(inlet.urn) - - task_outlets = get_task_outlets(task) - - for outlet in task_outlets: - datajob.outlets.append(outlet.urn) + datajob.inlets.extend( + entities_to_dataset_urn_list([let.urn for let in task_inlets]) + ) + datajob.outlets.extend( + entities_to_dataset_urn_list([let.urn for let in task_outlets]) + ) + datajob.upstream_urns.extend( + entities_to_datajob_urn_list([let.urn for let in task_inlets]) + ) task.log.info(f"Emitting Datahub dataJob {datajob}") datajob.emit(emitter, callback=_make_emit_callback(task.log)) diff --git a/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v1_basic_iolets.json b/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v1_basic_iolets.json index 6b460e99b1f281..a21df71efcdacf 100644 --- a/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v1_basic_iolets.json +++ b/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v1_basic_iolets.json @@ -11,7 +11,7 @@ "catchup": "False", "description": "None", "doc_md": "None", - "fileloc": "'/home/shubham/airflow1/datahub/metadata-ingestion-modules/airflow-plugin/tests/integration/dags/basic_iolets.py'", + "fileloc": "", "is_paused_upon_creation": "None", "start_date": "DateTime(2023, 1, 1, 0, 0, 0, tzinfo=Timezone('UTC'))", "tags": "None", @@ -95,14 +95,15 @@ "urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableA,PROD)", "urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableB,DEV)", "urn:li:dataset:(urn:li:dataPlatform:snowflake,cloud.mydb.schema.tableC,PROD)", - "urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableC,PROD)", - "urn:li:dataJob:(urn:li:dataFlow:(airflow,test_dag,PROD),test_task)" + "urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableC,PROD)" ], "outputDatasets": [ "urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableD,PROD)", "urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableE,PROD)" ], - "inputDatajobs": [], + "inputDatajobs": [ + "urn:li:dataJob:(urn:li:dataFlow:(airflow,test_dag,PROD),test_task)" + ], "fineGrainedLineages": [] } } @@ -151,17 +152,6 @@ } } }, -{ - "entityType": "dataJob", - "entityUrn": "urn:li:dataJob:(urn:li:dataFlow:(airflow,test_dag,PROD),test_task)", - "changeType": "UPSERT", - "aspectName": "status", - "aspect": { - "json": { - "removed": false - } - } -}, { "entityType": "dataset", "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableD,PROD)", @@ -257,14 +247,15 @@ "urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableA,PROD)", "urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableB,DEV)", "urn:li:dataset:(urn:li:dataPlatform:snowflake,cloud.mydb.schema.tableC,PROD)", - "urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableC,PROD)", - 
"urn:li:dataJob:(urn:li:dataFlow:(airflow,test_dag,PROD),test_task)" + "urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableC,PROD)" ], "outputDatasets": [ "urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableD,PROD)", "urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableE,PROD)" ], - "inputDatajobs": [], + "inputDatajobs": [ + "urn:li:dataJob:(urn:li:dataFlow:(airflow,test_dag,PROD),test_task)" + ], "fineGrainedLineages": [] } } @@ -313,17 +304,6 @@ } } }, -{ - "entityType": "dataJob", - "entityUrn": "urn:li:dataJob:(urn:li:dataFlow:(airflow,test_dag,PROD),test_task)", - "changeType": "UPSERT", - "aspectName": "status", - "aspect": { - "json": { - "removed": false - } - } -}, { "entityType": "dataset", "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableD,PROD)", @@ -389,9 +369,9 @@ "json": { "customProperties": { "run_id": "manual_run_test", - "duration": "0.143271", - "start_date": "2023-11-08 09:55:05.801617+00:00", - "end_date": "2023-11-08 09:55:05.944888+00:00", + "duration": "", + "start_date": "", + "end_date": "", "execution_date": "2023-09-27 21:34:38+00:00", "try_number": "1", "max_tries": "0", @@ -408,7 +388,7 @@ "name": "basic_iolets_run_data_task_manual_run_test", "type": "BATCH_AD_HOC", "created": { - "time": 1699437305801, + "time": 1701222667932, "actor": "urn:li:corpuser:datahub" } } @@ -437,8 +417,7 @@ "urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableA,PROD)", "urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableB,DEV)", "urn:li:dataset:(urn:li:dataPlatform:snowflake,cloud.mydb.schema.tableC,PROD)", - "urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableC,PROD)", - "urn:li:dataJob:(urn:li:dataFlow:(airflow,test_dag,PROD),test_task)" + "urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableC,PROD)" ] } } @@ -501,17 +480,6 @@ } } }, -{ - "entityType": "dataJob", - "entityUrn": "urn:li:dataJob:(urn:li:dataFlow:(airflow,test_dag,PROD),test_task)", - "changeType": "UPSERT", - "aspectName": "status", - "aspect": { - "json": { - "removed": false - } - } -}, { "entityType": "dataset", "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableD,PROD)", @@ -541,7 +509,7 @@ "aspectName": "dataProcessInstanceRunEvent", "aspect": { "json": { - "timestampMillis": 1699437305801, + "timestampMillis": 1701222667932, "partitionSpec": { "type": "FULL_TABLE", "partition": "FULL_TABLE_SNAPSHOT" @@ -558,7 +526,7 @@ "aspectName": "dataProcessInstanceRunEvent", "aspect": { "json": { - "timestampMillis": 1699437305944, + "timestampMillis": 1701222668122, "partitionSpec": { "type": "FULL_TABLE", "partition": "FULL_TABLE_SNAPSHOT" diff --git a/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v1_simple_dag.json b/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v1_simple_dag.json index 7ec172e3678dcf..61167223505410 100644 --- a/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v1_simple_dag.json +++ b/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v1_simple_dag.json @@ -11,7 +11,7 @@ "catchup": "False", "description": "'A simple DAG that runs a few fake data tasks.'", "doc_md": "None", - "fileloc": "'/home/shubham/airflow1/datahub/metadata-ingestion-modules/airflow-plugin/tests/integration/dags/simple_dag.py'", + "fileloc": "", "is_paused_upon_creation": "None", "start_date": "DateTime(2023, 1, 1, 0, 0, 0, tzinfo=Timezone('UTC'))", "tags": "None", @@ -94,13 +94,14 @@ "json": { "inputDatasets": [ 
"urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableA,PROD)", - "urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableC,PROD)", - "urn:li:dataJob:(urn:li:dataFlow:(airflow,test_dag,PROD),test_task)" + "urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableC,PROD)" ], "outputDatasets": [ "urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableD,PROD)" ], - "inputDatajobs": [], + "inputDatajobs": [ + "urn:li:dataJob:(urn:li:dataFlow:(airflow,test_dag,PROD),test_task)" + ], "fineGrainedLineages": [] } } @@ -127,17 +128,6 @@ } } }, -{ - "entityType": "dataJob", - "entityUrn": "urn:li:dataJob:(urn:li:dataFlow:(airflow,test_dag,PROD),test_task)", - "changeType": "UPSERT", - "aspectName": "status", - "aspect": { - "json": { - "removed": false - } - } -}, { "entityType": "dataset", "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableD,PROD)", @@ -220,13 +210,14 @@ "json": { "inputDatasets": [ "urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableA,PROD)", - "urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableC,PROD)", - "urn:li:dataJob:(urn:li:dataFlow:(airflow,test_dag,PROD),test_task)" + "urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableC,PROD)" ], "outputDatasets": [ "urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableD,PROD)" ], - "inputDatajobs": [], + "inputDatajobs": [ + "urn:li:dataJob:(urn:li:dataFlow:(airflow,test_dag,PROD),test_task)" + ], "fineGrainedLineages": [] } } @@ -253,17 +244,6 @@ } } }, -{ - "entityType": "dataJob", - "entityUrn": "urn:li:dataJob:(urn:li:dataFlow:(airflow,test_dag,PROD),test_task)", - "changeType": "UPSERT", - "aspectName": "status", - "aspect": { - "json": { - "removed": false - } - } -}, { "entityType": "dataset", "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableD,PROD)", @@ -318,9 +298,9 @@ "json": { "customProperties": { "run_id": "manual_run_test", - "duration": "0.120524", - "start_date": "2023-11-08 09:54:06.065112+00:00", - "end_date": "2023-11-08 09:54:06.185636+00:00", + "duration": "", + "start_date": "", + "end_date": "", "execution_date": "2023-09-27 21:34:38+00:00", "try_number": "1", "max_tries": "0", @@ -337,7 +317,7 @@ "name": "simple_dag_task_1_manual_run_test", "type": "BATCH_AD_HOC", "created": { - "time": 1699437246065, + "time": 1701222595752, "actor": "urn:li:corpuser:datahub" } } @@ -364,8 +344,7 @@ "json": { "inputs": [ "urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableA,PROD)", - "urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableC,PROD)", - "urn:li:dataJob:(urn:li:dataFlow:(airflow,test_dag,PROD),test_task)" + "urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableC,PROD)" ] } } @@ -405,17 +384,6 @@ } } }, -{ - "entityType": "dataJob", - "entityUrn": "urn:li:dataJob:(urn:li:dataFlow:(airflow,test_dag,PROD),test_task)", - "changeType": "UPSERT", - "aspectName": "status", - "aspect": { - "json": { - "removed": false - } - } -}, { "entityType": "dataset", "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableD,PROD)", @@ -434,7 +402,7 @@ "aspectName": "dataProcessInstanceRunEvent", "aspect": { "json": { - "timestampMillis": 1699437246065, + "timestampMillis": 1701222595752, "partitionSpec": { "type": "FULL_TABLE", "partition": "FULL_TABLE_SNAPSHOT" @@ -451,7 +419,7 @@ "aspectName": "dataProcessInstanceRunEvent", "aspect": { "json": { - "timestampMillis": 1699437246185, + "timestampMillis": 1701222595962, "partitionSpec": { "type": 
"FULL_TABLE", "partition": "FULL_TABLE_SNAPSHOT" @@ -476,7 +444,7 @@ "catchup": "False", "description": "'A simple DAG that runs a few fake data tasks.'", "doc_md": "None", - "fileloc": "'/home/shubham/airflow1/datahub/metadata-ingestion-modules/airflow-plugin/tests/integration/dags/simple_dag.py'", + "fileloc": "", "is_paused_upon_creation": "None", "start_date": "DateTime(2023, 1, 1, 0, 0, 0, tzinfo=Timezone('UTC'))", "tags": "None", @@ -687,9 +655,9 @@ "json": { "customProperties": { "run_id": "manual_run_test", - "duration": "0.099975", - "start_date": "2023-11-08 09:54:09.744583+00:00", - "end_date": "2023-11-08 09:54:09.844558+00:00", + "duration": "", + "start_date": "", + "end_date": "", "execution_date": "2023-09-27 21:34:38+00:00", "try_number": "1", "max_tries": "0", @@ -706,7 +674,7 @@ "name": "simple_dag_run_another_data_task_manual_run_test", "type": "BATCH_AD_HOC", "created": { - "time": 1699437249744, + "time": 1701222599804, "actor": "urn:li:corpuser:datahub" } } @@ -731,7 +699,7 @@ "aspectName": "dataProcessInstanceRunEvent", "aspect": { "json": { - "timestampMillis": 1699437249744, + "timestampMillis": 1701222599804, "partitionSpec": { "type": "FULL_TABLE", "partition": "FULL_TABLE_SNAPSHOT" @@ -748,7 +716,7 @@ "aspectName": "dataProcessInstanceRunEvent", "aspect": { "json": { - "timestampMillis": 1699437249844, + "timestampMillis": 1701222599959, "partitionSpec": { "type": "FULL_TABLE", "partition": "FULL_TABLE_SNAPSHOT" diff --git a/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v2_basic_iolets.json b/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v2_basic_iolets.json index 6767a368f366ae..7c52cbcddc13c6 100644 --- a/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v2_basic_iolets.json +++ b/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v2_basic_iolets.json @@ -11,7 +11,7 @@ "catchup": "False", "description": "None", "doc_md": "None", - "fileloc": "'/Users/shubham/airflow1/datahub/metadata-ingestion-modules/airflow-plugin/tests/integration/dags/basic_iolets.py'", + "fileloc": "", "is_paused_upon_creation": "None", "start_date": "DateTime(2023, 1, 1, 0, 0, 0, tzinfo=Timezone('UTC'))", "tags": "[]", @@ -75,7 +75,7 @@ "downstream_task_ids": "[]", "inlets": "[Dataset(platform='snowflake', name='mydb.schema.tableA', env='PROD', platform_instance=None), Dataset(platform='snowflake', name='mydb.schema.tableB', env='DEV', platform_instance=None), Dataset(platform='snowflake', name='mydb.schema.tableC', env='PROD', platform_instance='cloud'), Urn(_urn='urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableC,PROD)'), Urn(_urn='urn:li:dataJob:(urn:li:dataFlow:(airflow,test_dag,PROD),test_task)')]", "outlets": "[Dataset(platform='snowflake', name='mydb.schema.tableD', env='PROD', platform_instance=None), Dataset(platform='snowflake', name='mydb.schema.tableE', env='PROD', platform_instance=None)]", - "openlineage_run_facet_unknownSourceAttribute": "{\"_producer\": \"https://github.com/OpenLineage/OpenLineage/tree/1.2.0/integration/airflow\", \"_schemaURL\": \"https://raw.githubusercontent.com/OpenLineage/OpenLineage/main/spec/OpenLineage.json#/definitions/BaseFacet\", \"unknownItems\": [{\"name\": \"BashOperator\", \"properties\": {\"_BaseOperator__from_mapped\": false, \"_BaseOperator__init_kwargs\": {\"bash_command\": \"echo 'This is where you might run your data tooling.'\", \"dag\": \"<>\", \"inlets\": [{\"env\": \"PROD\", \"name\": \"mydb.schema.tableA\", \"platform\": \"snowflake\"}, 
{\"env\": \"DEV\", \"name\": \"mydb.schema.tableB\", \"platform\": \"snowflake\"}, {\"env\": \"PROD\", \"name\": \"mydb.schema.tableC\", \"platform\": \"snowflake\", \"platform_instance\": \"cloud\"}, {\"_urn\": \"urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableC,PROD)\"}, {\"_urn\": \"urn:li:dataJob:(urn:li:dataFlow:(airflow,test_dag,PROD),test_task)\"}], \"outlets\": [{\"env\": \"PROD\", \"name\": \"mydb.schema.tableD\", \"platform\": \"snowflake\"}, {\"env\": \"PROD\", \"name\": \"mydb.schema.tableE\", \"platform\": \"snowflake\"}], \"task_id\": \"run_data_task\"}, \"_BaseOperator__instantiated\": true, \"_dag\": \"<>\", \"_is_setup\": false, \"_is_teardown\": false, \"_lock_for_execution\": true, \"_log\": \"<>\", \"append_env\": false, \"bash_command\": \"echo 'This is where you might run your data tooling.'\", \"depends_on_past\": false, \"do_xcom_push\": true, \"downstream_task_ids\": [], \"email_on_failure\": true, \"email_on_retry\": true, \"executor_config\": {}, \"ignore_first_depends_on_past\": true, \"inlets\": [{\"env\": \"PROD\", \"name\": \"mydb.schema.tableA\", \"platform\": \"snowflake\"}, {\"env\": \"DEV\", \"name\": \"mydb.schema.tableB\", \"platform\": \"snowflake\"}, {\"env\": \"PROD\", \"name\": \"mydb.schema.tableC\", \"platform\": \"snowflake\", \"platform_instance\": \"cloud\"}, {\"_urn\": \"urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableC,PROD)\"}, {\"_urn\": \"urn:li:dataJob:(urn:li:dataFlow:(airflow,test_dag,PROD),test_task)\"}], \"outlets\": [{\"env\": \"PROD\", \"name\": \"mydb.schema.tableD\", \"platform\": \"snowflake\"}, {\"env\": \"PROD\", \"name\": \"mydb.schema.tableE\", \"platform\": \"snowflake\"}], \"output_encoding\": \"utf-8\", \"owner\": \"airflow\", \"params\": {}, \"pool\": \"default_pool\", \"pool_slots\": 1, \"priority_weight\": 1, \"queue\": \"default\", \"retries\": 0, \"retry_delay\": \"<>\", \"retry_exponential_backoff\": false, \"skip_on_exit_code\": [99], \"start_date\": \"<>\", \"task_group\": \"<>\", \"task_id\": \"run_data_task\", \"trigger_rule\": \"all_success\", \"upstream_task_ids\": [], \"wait_for_downstream\": false, \"wait_for_past_depends_before_skipping\": false, \"weight_rule\": \"downstream\"}, \"type\": \"operator\"}]}" + "openlineage_run_facet_unknownSourceAttribute": "{\"_producer\": \"https://github.com/OpenLineage/OpenLineage/tree/1.2.0/integration/airflow\", \"_schemaURL\": \"https://raw.githubusercontent.com/OpenLineage/OpenLineage/main/spec/OpenLineage.json#/definitions/BaseFacet\", \"unknownItems\": [{\"name\": \"BashOperator\", \"properties\": {\"_BaseOperator__from_mapped\": false, \"_BaseOperator__init_kwargs\": {\"bash_command\": \"echo 'This is where you might run your data tooling.'\", \"dag\": \"<>\", \"inlets\": [{\"env\": \"PROD\", \"name\": \"mydb.schema.tableA\", \"platform\": \"snowflake\"}, {\"env\": \"DEV\", \"name\": \"mydb.schema.tableB\", \"platform\": \"snowflake\"}, {\"env\": \"PROD\", \"name\": \"mydb.schema.tableC\", \"platform\": \"snowflake\", \"platform_instance\": \"cloud\"}, {\"_urn\": \"urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableC,PROD)\"}, {\"_urn\": \"urn:li:dataJob:(urn:li:dataFlow:(airflow,test_dag,PROD),test_task)\"}], \"outlets\": [{\"env\": \"PROD\", \"name\": \"mydb.schema.tableD\", \"platform\": \"snowflake\"}, {\"env\": \"PROD\", \"name\": \"mydb.schema.tableE\", \"platform\": \"snowflake\"}], \"task_id\": \"run_data_task\"}, \"_BaseOperator__instantiated\": true, \"_dag\": \"<>\", \"_lock_for_execution\": true, \"_log\": 
\"<>\", \"append_env\": false, \"bash_command\": \"echo 'This is where you might run your data tooling.'\", \"depends_on_past\": false, \"do_xcom_push\": true, \"downstream_task_ids\": [], \"email_on_failure\": true, \"email_on_retry\": true, \"executor_config\": {}, \"ignore_first_depends_on_past\": true, \"inlets\": [{\"env\": \"PROD\", \"name\": \"mydb.schema.tableA\", \"platform\": \"snowflake\"}, {\"env\": \"DEV\", \"name\": \"mydb.schema.tableB\", \"platform\": \"snowflake\"}, {\"env\": \"PROD\", \"name\": \"mydb.schema.tableC\", \"platform\": \"snowflake\", \"platform_instance\": \"cloud\"}, {\"_urn\": \"urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableC,PROD)\"}, {\"_urn\": \"urn:li:dataJob:(urn:li:dataFlow:(airflow,test_dag,PROD),test_task)\"}], \"outlets\": [{\"env\": \"PROD\", \"name\": \"mydb.schema.tableD\", \"platform\": \"snowflake\"}, {\"env\": \"PROD\", \"name\": \"mydb.schema.tableE\", \"platform\": \"snowflake\"}], \"output_encoding\": \"utf-8\", \"owner\": \"airflow\", \"params\": \"<>\", \"pool\": \"default_pool\", \"pool_slots\": 1, \"priority_weight\": 1, \"queue\": \"default\", \"retries\": 0, \"retry_delay\": \"<>\", \"retry_exponential_backoff\": false, \"skip_on_exit_code\": [99], \"start_date\": \"<>\", \"task_group\": \"<>\", \"task_id\": \"run_data_task\", \"trigger_rule\": \"all_success\", \"upstream_task_ids\": [], \"wait_for_downstream\": false, \"wait_for_past_depends_before_skipping\": false, \"weight_rule\": \"downstream\"}, \"type\": \"operator\"}]}" }, "externalUrl": "http://airflow.example.com/taskinstance/list/?flt1_dag_id_equals=basic_iolets&_flt_3_task_id=run_data_task", "name": "run_data_task", @@ -218,9 +218,9 @@ "json": { "customProperties": { "run_id": "manual_run_test", - "duration": "None", - "start_date": "2023-10-30 13:07:55.311482+00:00", - "end_date": "None", + "duration": "", + "start_date": "", + "end_date": "", "execution_date": "2023-09-27 21:34:38+00:00", "try_number": "0", "max_tries": "0", @@ -237,7 +237,7 @@ "name": "basic_iolets_run_data_task_manual_run_test", "type": "BATCH_AD_HOC", "created": { - "time": 1698671275311, + "time": 1701223416947, "actor": "urn:li:corpuser:datahub" } } @@ -358,7 +358,7 @@ "aspectName": "dataProcessInstanceRunEvent", "aspect": { "json": { - "timestampMillis": 1698671275311, + "timestampMillis": 1701223416947, "partitionSpec": { "type": "FULL_TABLE", "partition": "FULL_TABLE_SNAPSHOT" @@ -387,7 +387,7 @@ "downstream_task_ids": "[]", "inlets": "[Dataset(platform='snowflake', name='mydb.schema.tableA', env='PROD', platform_instance=None), Dataset(platform='snowflake', name='mydb.schema.tableB', env='DEV', platform_instance=None), Dataset(platform='snowflake', name='mydb.schema.tableC', env='PROD', platform_instance='cloud'), Urn(_urn='urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableC,PROD)'), Urn(_urn='urn:li:dataJob:(urn:li:dataFlow:(airflow,test_dag,PROD),test_task)')]", "outlets": "[Dataset(platform='snowflake', name='mydb.schema.tableD', env='PROD', platform_instance=None), Dataset(platform='snowflake', name='mydb.schema.tableE', env='PROD', platform_instance=None)]", - "openlineage_run_facet_unknownSourceAttribute": "{\"_producer\": \"https://github.com/OpenLineage/OpenLineage/tree/1.2.0/integration/airflow\", \"_schemaURL\": \"https://raw.githubusercontent.com/OpenLineage/OpenLineage/main/spec/OpenLineage.json#/definitions/BaseFacet\", \"unknownItems\": [{\"name\": \"BashOperator\", \"properties\": {\"_BaseOperator__from_mapped\": false, 
\"_BaseOperator__init_kwargs\": {\"bash_command\": \"echo 'This is where you might run your data tooling.'\", \"dag\": \"<>\", \"inlets\": [{\"env\": \"PROD\", \"name\": \"mydb.schema.tableA\", \"platform\": \"snowflake\"}, {\"env\": \"DEV\", \"name\": \"mydb.schema.tableB\", \"platform\": \"snowflake\"}, {\"env\": \"PROD\", \"name\": \"mydb.schema.tableC\", \"platform\": \"snowflake\", \"platform_instance\": \"cloud\"}, {\"_urn\": \"urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableC,PROD)\"}, {\"_urn\": \"urn:li:dataJob:(urn:li:dataFlow:(airflow,test_dag,PROD),test_task)\"}], \"outlets\": [{\"env\": \"PROD\", \"name\": \"mydb.schema.tableD\", \"platform\": \"snowflake\"}, {\"env\": \"PROD\", \"name\": \"mydb.schema.tableE\", \"platform\": \"snowflake\"}], \"task_id\": \"run_data_task\"}, \"_BaseOperator__instantiated\": true, \"_dag\": \"<>\", \"_is_setup\": false, \"_is_teardown\": false, \"_lock_for_execution\": true, \"_log\": \"<>\", \"append_env\": false, \"bash_command\": \"echo 'This is where you might run your data tooling.'\", \"depends_on_past\": false, \"do_xcom_push\": true, \"downstream_task_ids\": [], \"email_on_failure\": true, \"email_on_retry\": true, \"executor_config\": {}, \"ignore_first_depends_on_past\": true, \"inlets\": [{\"env\": \"PROD\", \"name\": \"mydb.schema.tableA\", \"platform\": \"snowflake\"}, {\"env\": \"DEV\", \"name\": \"mydb.schema.tableB\", \"platform\": \"snowflake\"}, {\"env\": \"PROD\", \"name\": \"mydb.schema.tableC\", \"platform\": \"snowflake\", \"platform_instance\": \"cloud\"}, {\"_urn\": \"urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableC,PROD)\"}, {\"_urn\": \"urn:li:dataJob:(urn:li:dataFlow:(airflow,test_dag,PROD),test_task)\"}], \"outlets\": [{\"env\": \"PROD\", \"name\": \"mydb.schema.tableD\", \"platform\": \"snowflake\"}, {\"env\": \"PROD\", \"name\": \"mydb.schema.tableE\", \"platform\": \"snowflake\"}], \"output_encoding\": \"utf-8\", \"owner\": \"airflow\", \"params\": {}, \"pool\": \"default_pool\", \"pool_slots\": 1, \"priority_weight\": 1, \"queue\": \"default\", \"retries\": 0, \"retry_delay\": \"<>\", \"retry_exponential_backoff\": false, \"skip_on_exit_code\": [99], \"start_date\": \"<>\", \"task_group\": \"<>\", \"task_id\": \"run_data_task\", \"trigger_rule\": \"all_success\", \"upstream_task_ids\": [], \"wait_for_downstream\": false, \"wait_for_past_depends_before_skipping\": false, \"weight_rule\": \"downstream\"}, \"type\": \"operator\"}]}" + "openlineage_run_facet_unknownSourceAttribute": "{\"_producer\": \"https://github.com/OpenLineage/OpenLineage/tree/1.2.0/integration/airflow\", \"_schemaURL\": \"https://raw.githubusercontent.com/OpenLineage/OpenLineage/main/spec/OpenLineage.json#/definitions/BaseFacet\", \"unknownItems\": [{\"name\": \"BashOperator\", \"properties\": {\"_BaseOperator__from_mapped\": false, \"_BaseOperator__init_kwargs\": {\"bash_command\": \"echo 'This is where you might run your data tooling.'\", \"dag\": \"<>\", \"inlets\": [{\"env\": \"PROD\", \"name\": \"mydb.schema.tableA\", \"platform\": \"snowflake\"}, {\"env\": \"DEV\", \"name\": \"mydb.schema.tableB\", \"platform\": \"snowflake\"}, {\"env\": \"PROD\", \"name\": \"mydb.schema.tableC\", \"platform\": \"snowflake\", \"platform_instance\": \"cloud\"}, {\"_urn\": \"urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableC,PROD)\"}, {\"_urn\": \"urn:li:dataJob:(urn:li:dataFlow:(airflow,test_dag,PROD),test_task)\"}], \"outlets\": [{\"env\": \"PROD\", \"name\": \"mydb.schema.tableD\", \"platform\": 
\"snowflake\"}, {\"env\": \"PROD\", \"name\": \"mydb.schema.tableE\", \"platform\": \"snowflake\"}], \"task_id\": \"run_data_task\"}, \"_BaseOperator__instantiated\": true, \"_dag\": \"<>\", \"_lock_for_execution\": true, \"_log\": \"<>\", \"append_env\": false, \"bash_command\": \"echo 'This is where you might run your data tooling.'\", \"depends_on_past\": false, \"do_xcom_push\": true, \"downstream_task_ids\": [], \"email_on_failure\": true, \"email_on_retry\": true, \"executor_config\": {}, \"ignore_first_depends_on_past\": true, \"inlets\": [{\"env\": \"PROD\", \"name\": \"mydb.schema.tableA\", \"platform\": \"snowflake\"}, {\"env\": \"DEV\", \"name\": \"mydb.schema.tableB\", \"platform\": \"snowflake\"}, {\"env\": \"PROD\", \"name\": \"mydb.schema.tableC\", \"platform\": \"snowflake\", \"platform_instance\": \"cloud\"}, {\"_urn\": \"urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableC,PROD)\"}, {\"_urn\": \"urn:li:dataJob:(urn:li:dataFlow:(airflow,test_dag,PROD),test_task)\"}], \"outlets\": [{\"env\": \"PROD\", \"name\": \"mydb.schema.tableD\", \"platform\": \"snowflake\"}, {\"env\": \"PROD\", \"name\": \"mydb.schema.tableE\", \"platform\": \"snowflake\"}], \"output_encoding\": \"utf-8\", \"owner\": \"airflow\", \"params\": \"<>\", \"pool\": \"default_pool\", \"pool_slots\": 1, \"priority_weight\": 1, \"queue\": \"default\", \"retries\": 0, \"retry_delay\": \"<>\", \"retry_exponential_backoff\": false, \"skip_on_exit_code\": [99], \"start_date\": \"<>\", \"task_group\": \"<>\", \"task_id\": \"run_data_task\", \"trigger_rule\": \"all_success\", \"upstream_task_ids\": [], \"wait_for_downstream\": false, \"wait_for_past_depends_before_skipping\": false, \"weight_rule\": \"downstream\"}, \"type\": \"operator\"}]}" }, "externalUrl": "http://airflow.example.com/taskinstance/list/?flt1_dag_id_equals=basic_iolets&_flt_3_task_id=run_data_task", "name": "run_data_task", @@ -528,7 +528,7 @@ "aspectName": "dataProcessInstanceRunEvent", "aspect": { "json": { - "timestampMillis": 1698671276777, + "timestampMillis": 1701223417702, "partitionSpec": { "type": "FULL_TABLE", "partition": "FULL_TABLE_SNAPSHOT" diff --git a/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v2_basic_iolets_no_dag_listener.json b/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v2_basic_iolets_no_dag_listener.json index 63b0a059355541..150f95d5171c73 100644 --- a/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v2_basic_iolets_no_dag_listener.json +++ b/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v2_basic_iolets_no_dag_listener.json @@ -11,7 +11,7 @@ "catchup": "False", "description": "None", "doc_md": "None", - "fileloc": "'/Users/hsheth/projects/datahub/metadata-ingestion-modules/airflow-plugin/tests/integration/dags/basic_iolets.py'", + "fileloc": "", "is_paused_upon_creation": "None", "start_date": "DateTime(2023, 1, 1, 0, 0, 0, tzinfo=Timezone('UTC'))", "tags": "[]", @@ -218,9 +218,9 @@ "json": { "customProperties": { "run_id": "manual_run_test", - "duration": "None", - "start_date": "2023-11-10 19:11:17.444435+00:00", - "end_date": "None", + "duration": "", + "start_date": "", + "end_date": "", "execution_date": "2023-09-27 21:34:38+00:00", "try_number": "0", "max_tries": "0", @@ -237,7 +237,7 @@ "name": "basic_iolets_run_data_task_manual_run_test", "type": "BATCH_AD_HOC", "created": { - "time": 1699643477444, + "time": 1701223185349, "actor": "urn:li:corpuser:datahub" } } @@ -358,7 +358,7 @@ "aspectName": 
"dataProcessInstanceRunEvent", "aspect": { "json": { - "timestampMillis": 1699643477444, + "timestampMillis": 1701223185349, "partitionSpec": { "type": "FULL_TABLE", "partition": "FULL_TABLE_SNAPSHOT" @@ -528,7 +528,7 @@ "aspectName": "dataProcessInstanceRunEvent", "aspect": { "json": { - "timestampMillis": 1699643478123, + "timestampMillis": 1701223186055, "partitionSpec": { "type": "FULL_TABLE", "partition": "FULL_TABLE_SNAPSHOT" diff --git a/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v2_simple_dag.json b/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v2_simple_dag.json index c558f79c32e150..0248ab0473c9ea 100644 --- a/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v2_simple_dag.json +++ b/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v2_simple_dag.json @@ -11,7 +11,7 @@ "catchup": "False", "description": "'A simple DAG that runs a few fake data tasks.'", "doc_md": "None", - "fileloc": "'/Users/shubham/airflow1/datahub/metadata-ingestion-modules/airflow-plugin/tests/integration/dags/simple_dag.py'", + "fileloc": "", "is_paused_upon_creation": "None", "start_date": "DateTime(2023, 1, 1, 0, 0, 0, tzinfo=Timezone('UTC'))", "tags": "[]", @@ -76,7 +76,7 @@ "downstream_task_ids": "['run_another_data_task']", "inlets": "[Dataset(platform='snowflake', name='mydb.schema.tableA', env='PROD', platform_instance=None), Urn(_urn='urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableC,PROD)'), Urn(_urn='urn:li:dataJob:(urn:li:dataFlow:(airflow,test_dag,PROD),test_task)')]", "outlets": "[Dataset(platform='snowflake', name='mydb.schema.tableD', env='PROD', platform_instance=None)]", - "openlineage_run_facet_unknownSourceAttribute": "{\"_producer\": \"https://github.com/OpenLineage/OpenLineage/tree/1.2.0/integration/airflow\", \"_schemaURL\": \"https://raw.githubusercontent.com/OpenLineage/OpenLineage/main/spec/OpenLineage.json#/definitions/BaseFacet\", \"unknownItems\": [{\"name\": \"BashOperator\", \"properties\": {\"_BaseOperator__from_mapped\": false, \"_BaseOperator__init_kwargs\": {\"bash_command\": \"echo 'task 1'\", \"dag\": \"<>\", \"inlets\": [{\"env\": \"PROD\", \"name\": \"mydb.schema.tableA\", \"platform\": \"snowflake\"}, {\"_urn\": \"urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableC,PROD)\"}, {\"_urn\": \"urn:li:dataJob:(urn:li:dataFlow:(airflow,test_dag,PROD),test_task)\"}], \"outlets\": [{\"env\": \"PROD\", \"name\": \"mydb.schema.tableD\", \"platform\": \"snowflake\"}], \"task_id\": \"task_1\"}, \"_BaseOperator__instantiated\": true, \"_dag\": \"<>\", \"_is_setup\": false, \"_is_teardown\": false, \"_lock_for_execution\": true, \"_log\": \"<>\", \"append_env\": false, \"bash_command\": \"echo 'task 1'\", \"depends_on_past\": false, \"do_xcom_push\": true, \"downstream_task_ids\": [\"run_another_data_task\"], \"email_on_failure\": true, \"email_on_retry\": true, \"executor_config\": {}, \"ignore_first_depends_on_past\": true, \"inlets\": [{\"env\": \"PROD\", \"name\": \"mydb.schema.tableA\", \"platform\": \"snowflake\"}, {\"_urn\": \"urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableC,PROD)\"}, {\"_urn\": \"urn:li:dataJob:(urn:li:dataFlow:(airflow,test_dag,PROD),test_task)\"}], \"outlets\": [{\"env\": \"PROD\", \"name\": \"mydb.schema.tableD\", \"platform\": \"snowflake\"}], \"output_encoding\": \"utf-8\", \"owner\": \"airflow\", \"params\": {}, \"pool\": \"default_pool\", \"pool_slots\": 1, \"priority_weight\": 1, \"queue\": \"default\", \"retries\": 0, 
\"retry_delay\": \"<>\", \"retry_exponential_backoff\": false, \"skip_on_exit_code\": [99], \"start_date\": \"<>\", \"task_group\": \"<>\", \"task_id\": \"task_1\", \"trigger_rule\": \"all_success\", \"upstream_task_ids\": [], \"wait_for_downstream\": false, \"wait_for_past_depends_before_skipping\": false, \"weight_rule\": \"downstream\"}, \"type\": \"operator\"}]}" + "openlineage_run_facet_unknownSourceAttribute": "{\"_producer\": \"https://github.com/OpenLineage/OpenLineage/tree/1.2.0/integration/airflow\", \"_schemaURL\": \"https://raw.githubusercontent.com/OpenLineage/OpenLineage/main/spec/OpenLineage.json#/definitions/BaseFacet\", \"unknownItems\": [{\"name\": \"BashOperator\", \"properties\": {\"_BaseOperator__from_mapped\": false, \"_BaseOperator__init_kwargs\": {\"bash_command\": \"echo 'task 1'\", \"dag\": \"<>\", \"inlets\": [{\"env\": \"PROD\", \"name\": \"mydb.schema.tableA\", \"platform\": \"snowflake\"}, {\"_urn\": \"urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableC,PROD)\"}, {\"_urn\": \"urn:li:dataJob:(urn:li:dataFlow:(airflow,test_dag,PROD),test_task)\"}], \"outlets\": [{\"env\": \"PROD\", \"name\": \"mydb.schema.tableD\", \"platform\": \"snowflake\"}], \"task_id\": \"task_1\"}, \"_BaseOperator__instantiated\": true, \"_dag\": \"<>\", \"_lock_for_execution\": true, \"_log\": \"<>\", \"append_env\": false, \"bash_command\": \"echo 'task 1'\", \"depends_on_past\": false, \"do_xcom_push\": true, \"downstream_task_ids\": [\"run_another_data_task\"], \"email_on_failure\": true, \"email_on_retry\": true, \"executor_config\": {}, \"ignore_first_depends_on_past\": true, \"inlets\": [{\"env\": \"PROD\", \"name\": \"mydb.schema.tableA\", \"platform\": \"snowflake\"}, {\"_urn\": \"urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableC,PROD)\"}, {\"_urn\": \"urn:li:dataJob:(urn:li:dataFlow:(airflow,test_dag,PROD),test_task)\"}], \"outlets\": [{\"env\": \"PROD\", \"name\": \"mydb.schema.tableD\", \"platform\": \"snowflake\"}], \"output_encoding\": \"utf-8\", \"owner\": \"airflow\", \"params\": \"<>\", \"pool\": \"default_pool\", \"pool_slots\": 1, \"priority_weight\": 1, \"queue\": \"default\", \"retries\": 0, \"retry_delay\": \"<>\", \"retry_exponential_backoff\": false, \"skip_on_exit_code\": [99], \"start_date\": \"<>\", \"task_group\": \"<>\", \"task_id\": \"task_1\", \"trigger_rule\": \"all_success\", \"upstream_task_ids\": [], \"wait_for_downstream\": false, \"wait_for_past_depends_before_skipping\": false, \"weight_rule\": \"downstream\"}, \"type\": \"operator\"}]}" }, "externalUrl": "http://airflow.example.com/taskinstance/list/?flt1_dag_id_equals=simple_dag&_flt_3_task_id=task_1", "name": "task_1", @@ -183,9 +183,9 @@ "json": { "customProperties": { "run_id": "manual_run_test", - "duration": "None", - "start_date": "2023-10-30 13:06:07.193282+00:00", - "end_date": "None", + "duration": "", + "start_date": "", + "end_date": "", "execution_date": "2023-09-27 21:34:38+00:00", "try_number": "0", "max_tries": "0", @@ -202,7 +202,7 @@ "name": "simple_dag_task_1_manual_run_test", "type": "BATCH_AD_HOC", "created": { - "time": 1698671167193, + "time": 1701223349283, "actor": "urn:li:corpuser:datahub" } } @@ -287,7 +287,7 @@ "aspectName": "dataProcessInstanceRunEvent", "aspect": { "json": { - "timestampMillis": 1698671167193, + "timestampMillis": 1701223349283, "partitionSpec": { "type": "FULL_TABLE", "partition": "FULL_TABLE_SNAPSHOT" @@ -316,7 +316,7 @@ "downstream_task_ids": "['run_another_data_task']", "inlets": "[Dataset(platform='snowflake', 
name='mydb.schema.tableA', env='PROD', platform_instance=None), Urn(_urn='urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableC,PROD)'), Urn(_urn='urn:li:dataJob:(urn:li:dataFlow:(airflow,test_dag,PROD),test_task)')]", "outlets": "[Dataset(platform='snowflake', name='mydb.schema.tableD', env='PROD', platform_instance=None)]", - "openlineage_run_facet_unknownSourceAttribute": "{\"_producer\": \"https://github.com/OpenLineage/OpenLineage/tree/1.2.0/integration/airflow\", \"_schemaURL\": \"https://raw.githubusercontent.com/OpenLineage/OpenLineage/main/spec/OpenLineage.json#/definitions/BaseFacet\", \"unknownItems\": [{\"name\": \"BashOperator\", \"properties\": {\"_BaseOperator__from_mapped\": false, \"_BaseOperator__init_kwargs\": {\"bash_command\": \"echo 'task 1'\", \"dag\": \"<>\", \"inlets\": [{\"env\": \"PROD\", \"name\": \"mydb.schema.tableA\", \"platform\": \"snowflake\"}, {\"_urn\": \"urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableC,PROD)\"}, {\"_urn\": \"urn:li:dataJob:(urn:li:dataFlow:(airflow,test_dag,PROD),test_task)\"}], \"outlets\": [{\"env\": \"PROD\", \"name\": \"mydb.schema.tableD\", \"platform\": \"snowflake\"}], \"task_id\": \"task_1\"}, \"_BaseOperator__instantiated\": true, \"_dag\": \"<>\", \"_is_setup\": false, \"_is_teardown\": false, \"_lock_for_execution\": true, \"_log\": \"<>\", \"append_env\": false, \"bash_command\": \"echo 'task 1'\", \"depends_on_past\": false, \"do_xcom_push\": true, \"downstream_task_ids\": [\"run_another_data_task\"], \"email_on_failure\": true, \"email_on_retry\": true, \"executor_config\": {}, \"ignore_first_depends_on_past\": true, \"inlets\": [{\"env\": \"PROD\", \"name\": \"mydb.schema.tableA\", \"platform\": \"snowflake\"}, {\"_urn\": \"urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableC,PROD)\"}, {\"_urn\": \"urn:li:dataJob:(urn:li:dataFlow:(airflow,test_dag,PROD),test_task)\"}], \"outlets\": [{\"env\": \"PROD\", \"name\": \"mydb.schema.tableD\", \"platform\": \"snowflake\"}], \"output_encoding\": \"utf-8\", \"owner\": \"airflow\", \"params\": {}, \"pool\": \"default_pool\", \"pool_slots\": 1, \"priority_weight\": 1, \"queue\": \"default\", \"retries\": 0, \"retry_delay\": \"<>\", \"retry_exponential_backoff\": false, \"skip_on_exit_code\": [99], \"start_date\": \"<>\", \"task_group\": \"<>\", \"task_id\": \"task_1\", \"trigger_rule\": \"all_success\", \"upstream_task_ids\": [], \"wait_for_downstream\": false, \"wait_for_past_depends_before_skipping\": false, \"weight_rule\": \"downstream\"}, \"type\": \"operator\"}]}" + "openlineage_run_facet_unknownSourceAttribute": "{\"_producer\": \"https://github.com/OpenLineage/OpenLineage/tree/1.2.0/integration/airflow\", \"_schemaURL\": \"https://raw.githubusercontent.com/OpenLineage/OpenLineage/main/spec/OpenLineage.json#/definitions/BaseFacet\", \"unknownItems\": [{\"name\": \"BashOperator\", \"properties\": {\"_BaseOperator__from_mapped\": false, \"_BaseOperator__init_kwargs\": {\"bash_command\": \"echo 'task 1'\", \"dag\": \"<>\", \"inlets\": [{\"env\": \"PROD\", \"name\": \"mydb.schema.tableA\", \"platform\": \"snowflake\"}, {\"_urn\": \"urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableC,PROD)\"}, {\"_urn\": \"urn:li:dataJob:(urn:li:dataFlow:(airflow,test_dag,PROD),test_task)\"}], \"outlets\": [{\"env\": \"PROD\", \"name\": \"mydb.schema.tableD\", \"platform\": \"snowflake\"}], \"task_id\": \"task_1\"}, \"_BaseOperator__instantiated\": true, \"_dag\": \"<>\", \"_lock_for_execution\": true, \"_log\": \"<>\", \"append_env\": false, 
\"bash_command\": \"echo 'task 1'\", \"depends_on_past\": false, \"do_xcom_push\": true, \"downstream_task_ids\": [\"run_another_data_task\"], \"email_on_failure\": true, \"email_on_retry\": true, \"executor_config\": {}, \"ignore_first_depends_on_past\": true, \"inlets\": [{\"env\": \"PROD\", \"name\": \"mydb.schema.tableA\", \"platform\": \"snowflake\"}, {\"_urn\": \"urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableC,PROD)\"}, {\"_urn\": \"urn:li:dataJob:(urn:li:dataFlow:(airflow,test_dag,PROD),test_task)\"}], \"outlets\": [{\"env\": \"PROD\", \"name\": \"mydb.schema.tableD\", \"platform\": \"snowflake\"}], \"output_encoding\": \"utf-8\", \"owner\": \"airflow\", \"params\": \"<>\", \"pool\": \"default_pool\", \"pool_slots\": 1, \"priority_weight\": 1, \"queue\": \"default\", \"retries\": 0, \"retry_delay\": \"<>\", \"retry_exponential_backoff\": false, \"skip_on_exit_code\": [99], \"start_date\": \"<>\", \"task_group\": \"<>\", \"task_id\": \"task_1\", \"trigger_rule\": \"all_success\", \"upstream_task_ids\": [], \"wait_for_downstream\": false, \"wait_for_past_depends_before_skipping\": false, \"weight_rule\": \"downstream\"}, \"type\": \"operator\"}]}" }, "externalUrl": "http://airflow.example.com/taskinstance/list/?flt1_dag_id_equals=simple_dag&_flt_3_task_id=task_1", "name": "task_1", @@ -421,7 +421,7 @@ "aspectName": "dataProcessInstanceRunEvent", "aspect": { "json": { - "timestampMillis": 1698671168726, + "timestampMillis": 1701223349928, "partitionSpec": { "type": "FULL_TABLE", "partition": "FULL_TABLE_SNAPSHOT" @@ -453,7 +453,7 @@ "downstream_task_ids": "[]", "inlets": "[]", "outlets": "[]", - "openlineage_run_facet_unknownSourceAttribute": "{\"_producer\": \"https://github.com/OpenLineage/OpenLineage/tree/1.2.0/integration/airflow\", \"_schemaURL\": \"https://raw.githubusercontent.com/OpenLineage/OpenLineage/main/spec/OpenLineage.json#/definitions/BaseFacet\", \"unknownItems\": [{\"name\": \"BashOperator\", \"properties\": {\"_BaseOperator__from_mapped\": false, \"_BaseOperator__init_kwargs\": {\"bash_command\": \"echo 'task 2'\", \"dag\": \"<>\", \"task_id\": \"run_another_data_task\"}, \"_BaseOperator__instantiated\": true, \"_dag\": \"<>\", \"_is_setup\": false, \"_is_teardown\": false, \"_lock_for_execution\": true, \"_log\": \"<>\", \"append_env\": false, \"bash_command\": \"echo 'task 2'\", \"depends_on_past\": false, \"do_xcom_push\": true, \"downstream_task_ids\": [], \"email_on_failure\": true, \"email_on_retry\": true, \"executor_config\": {}, \"ignore_first_depends_on_past\": true, \"inlets\": [], \"outlets\": [], \"output_encoding\": \"utf-8\", \"owner\": \"airflow\", \"params\": {}, \"pool\": \"default_pool\", \"pool_slots\": 1, \"priority_weight\": 1, \"queue\": \"default\", \"retries\": 0, \"retry_delay\": \"<>\", \"retry_exponential_backoff\": false, \"skip_on_exit_code\": [99], \"start_date\": \"<>\", \"task_group\": \"<>\", \"task_id\": \"run_another_data_task\", \"trigger_rule\": \"all_success\", \"upstream_task_ids\": [\"task_1\"], \"wait_for_downstream\": false, \"wait_for_past_depends_before_skipping\": false, \"weight_rule\": \"downstream\"}, \"type\": \"operator\"}]}" + "openlineage_run_facet_unknownSourceAttribute": "{\"_producer\": \"https://github.com/OpenLineage/OpenLineage/tree/1.2.0/integration/airflow\", \"_schemaURL\": \"https://raw.githubusercontent.com/OpenLineage/OpenLineage/main/spec/OpenLineage.json#/definitions/BaseFacet\", \"unknownItems\": [{\"name\": \"BashOperator\", \"properties\": {\"_BaseOperator__from_mapped\": false, 
\"_BaseOperator__init_kwargs\": {\"bash_command\": \"echo 'task 2'\", \"dag\": \"<>\", \"task_id\": \"run_another_data_task\"}, \"_BaseOperator__instantiated\": true, \"_dag\": \"<>\", \"_lock_for_execution\": true, \"_log\": \"<>\", \"append_env\": false, \"bash_command\": \"echo 'task 2'\", \"depends_on_past\": false, \"do_xcom_push\": true, \"downstream_task_ids\": [], \"email_on_failure\": true, \"email_on_retry\": true, \"executor_config\": {}, \"ignore_first_depends_on_past\": true, \"inlets\": [], \"outlets\": [], \"output_encoding\": \"utf-8\", \"owner\": \"airflow\", \"params\": \"<>\", \"pool\": \"default_pool\", \"pool_slots\": 1, \"priority_weight\": 1, \"queue\": \"default\", \"retries\": 0, \"retry_delay\": \"<>\", \"retry_exponential_backoff\": false, \"skip_on_exit_code\": [99], \"start_date\": \"<>\", \"task_group\": \"<>\", \"task_id\": \"run_another_data_task\", \"trigger_rule\": \"all_success\", \"upstream_task_ids\": [\"task_1\"], \"wait_for_downstream\": false, \"wait_for_past_depends_before_skipping\": false, \"weight_rule\": \"downstream\"}, \"type\": \"operator\"}]}" }, "externalUrl": "http://airflow.example.com/taskinstance/list/?flt1_dag_id_equals=simple_dag&_flt_3_task_id=run_another_data_task", "name": "run_another_data_task", @@ -522,9 +522,9 @@ "json": { "customProperties": { "run_id": "manual_run_test", - "duration": "None", - "start_date": "2023-10-30 13:06:19.970466+00:00", - "end_date": "None", + "duration": "", + "start_date": "", + "end_date": "", "execution_date": "2023-09-27 21:34:38+00:00", "try_number": "0", "max_tries": "0", @@ -541,7 +541,7 @@ "name": "simple_dag_run_another_data_task_manual_run_test", "type": "BATCH_AD_HOC", "created": { - "time": 1698671179970, + "time": 1701223355004, "actor": "urn:li:corpuser:datahub" } } @@ -566,7 +566,7 @@ "aspectName": "dataProcessInstanceRunEvent", "aspect": { "json": { - "timestampMillis": 1698671179970, + "timestampMillis": 1701223355004, "partitionSpec": { "type": "FULL_TABLE", "partition": "FULL_TABLE_SNAPSHOT" @@ -595,7 +595,7 @@ "downstream_task_ids": "[]", "inlets": "[]", "outlets": "[]", - "openlineage_run_facet_unknownSourceAttribute": "{\"_producer\": \"https://github.com/OpenLineage/OpenLineage/tree/1.2.0/integration/airflow\", \"_schemaURL\": \"https://raw.githubusercontent.com/OpenLineage/OpenLineage/main/spec/OpenLineage.json#/definitions/BaseFacet\", \"unknownItems\": [{\"name\": \"BashOperator\", \"properties\": {\"_BaseOperator__from_mapped\": false, \"_BaseOperator__init_kwargs\": {\"bash_command\": \"echo 'task 2'\", \"dag\": \"<>\", \"task_id\": \"run_another_data_task\"}, \"_BaseOperator__instantiated\": true, \"_dag\": \"<>\", \"_is_setup\": false, \"_is_teardown\": false, \"_lock_for_execution\": true, \"_log\": \"<>\", \"append_env\": false, \"bash_command\": \"echo 'task 2'\", \"depends_on_past\": false, \"do_xcom_push\": true, \"downstream_task_ids\": [], \"email_on_failure\": true, \"email_on_retry\": true, \"executor_config\": {}, \"ignore_first_depends_on_past\": true, \"inlets\": [], \"outlets\": [], \"output_encoding\": \"utf-8\", \"owner\": \"airflow\", \"params\": {}, \"pool\": \"default_pool\", \"pool_slots\": 1, \"priority_weight\": 1, \"queue\": \"default\", \"retries\": 0, \"retry_delay\": \"<>\", \"retry_exponential_backoff\": false, \"skip_on_exit_code\": [99], \"start_date\": \"<>\", \"task_group\": \"<>\", \"task_id\": \"run_another_data_task\", \"trigger_rule\": \"all_success\", \"upstream_task_ids\": [\"task_1\"], \"wait_for_downstream\": false, 
\"wait_for_past_depends_before_skipping\": false, \"weight_rule\": \"downstream\"}, \"type\": \"operator\"}]}" + "openlineage_run_facet_unknownSourceAttribute": "{\"_producer\": \"https://github.com/OpenLineage/OpenLineage/tree/1.2.0/integration/airflow\", \"_schemaURL\": \"https://raw.githubusercontent.com/OpenLineage/OpenLineage/main/spec/OpenLineage.json#/definitions/BaseFacet\", \"unknownItems\": [{\"name\": \"BashOperator\", \"properties\": {\"_BaseOperator__from_mapped\": false, \"_BaseOperator__init_kwargs\": {\"bash_command\": \"echo 'task 2'\", \"dag\": \"<>\", \"task_id\": \"run_another_data_task\"}, \"_BaseOperator__instantiated\": true, \"_dag\": \"<>\", \"_lock_for_execution\": true, \"_log\": \"<>\", \"append_env\": false, \"bash_command\": \"echo 'task 2'\", \"depends_on_past\": false, \"do_xcom_push\": true, \"downstream_task_ids\": [], \"email_on_failure\": true, \"email_on_retry\": true, \"executor_config\": {}, \"ignore_first_depends_on_past\": true, \"inlets\": [], \"outlets\": [], \"output_encoding\": \"utf-8\", \"owner\": \"airflow\", \"params\": \"<>\", \"pool\": \"default_pool\", \"pool_slots\": 1, \"priority_weight\": 1, \"queue\": \"default\", \"retries\": 0, \"retry_delay\": \"<>\", \"retry_exponential_backoff\": false, \"skip_on_exit_code\": [99], \"start_date\": \"<>\", \"task_group\": \"<>\", \"task_id\": \"run_another_data_task\", \"trigger_rule\": \"all_success\", \"upstream_task_ids\": [\"task_1\"], \"wait_for_downstream\": false, \"wait_for_past_depends_before_skipping\": false, \"weight_rule\": \"downstream\"}, \"type\": \"operator\"}]}" }, "externalUrl": "http://airflow.example.com/taskinstance/list/?flt1_dag_id_equals=simple_dag&_flt_3_task_id=run_another_data_task", "name": "run_another_data_task", @@ -662,7 +662,7 @@ "aspectName": "dataProcessInstanceRunEvent", "aspect": { "json": { - "timestampMillis": 1698671180730, + "timestampMillis": 1701223355580, "partitionSpec": { "type": "FULL_TABLE", "partition": "FULL_TABLE_SNAPSHOT" diff --git a/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v2_simple_dag_no_dag_listener.json b/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v2_simple_dag_no_dag_listener.json index ec0f3cab1e81f3..7860251fc22dcc 100644 --- a/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v2_simple_dag_no_dag_listener.json +++ b/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v2_simple_dag_no_dag_listener.json @@ -11,7 +11,7 @@ "catchup": "False", "description": "'A simple DAG that runs a few fake data tasks.'", "doc_md": "None", - "fileloc": "'/Users/hsheth/projects/datahub/metadata-ingestion-modules/airflow-plugin/tests/integration/dags/simple_dag.py'", + "fileloc": "", "is_paused_upon_creation": "None", "start_date": "DateTime(2023, 1, 1, 0, 0, 0, tzinfo=Timezone('UTC'))", "tags": "[]", @@ -183,9 +183,9 @@ "json": { "customProperties": { "run_id": "manual_run_test", - "duration": "None", - "start_date": "2023-11-10 19:10:10.856995+00:00", - "end_date": "None", + "duration": "", + "start_date": "", + "end_date": "", "execution_date": "2023-09-27 21:34:38+00:00", "try_number": "0", "max_tries": "0", @@ -202,7 +202,7 @@ "name": "simple_dag_task_1_manual_run_test", "type": "BATCH_AD_HOC", "created": { - "time": 1699643410856, + "time": 1701223113232, "actor": "urn:li:corpuser:datahub" } } @@ -287,7 +287,7 @@ "aspectName": "dataProcessInstanceRunEvent", "aspect": { "json": { - "timestampMillis": 1699643410856, + "timestampMillis": 1701223113232, "partitionSpec": 
{ "type": "FULL_TABLE", "partition": "FULL_TABLE_SNAPSHOT" @@ -421,7 +421,7 @@ "aspectName": "dataProcessInstanceRunEvent", "aspect": { "json": { - "timestampMillis": 1699643411390, + "timestampMillis": 1701223113778, "partitionSpec": { "type": "FULL_TABLE", "partition": "FULL_TABLE_SNAPSHOT" @@ -446,7 +446,7 @@ "catchup": "False", "description": "'A simple DAG that runs a few fake data tasks.'", "doc_md": "None", - "fileloc": "'/Users/hsheth/projects/datahub/metadata-ingestion-modules/airflow-plugin/tests/integration/dags/simple_dag.py'", + "fileloc": "", "is_paused_upon_creation": "None", "start_date": "DateTime(2023, 1, 1, 0, 0, 0, tzinfo=Timezone('UTC'))", "tags": "[]", @@ -580,9 +580,9 @@ "json": { "customProperties": { "run_id": "manual_run_test", - "duration": "None", - "start_date": "2023-11-10 19:10:15.128009+00:00", - "end_date": "None", + "duration": "", + "start_date": "", + "end_date": "", "execution_date": "2023-09-27 21:34:38+00:00", "try_number": "0", "max_tries": "0", @@ -599,7 +599,7 @@ "name": "simple_dag_run_another_data_task_manual_run_test", "type": "BATCH_AD_HOC", "created": { - "time": 1699643415128, + "time": 1701223119777, "actor": "urn:li:corpuser:datahub" } } @@ -624,7 +624,7 @@ "aspectName": "dataProcessInstanceRunEvent", "aspect": { "json": { - "timestampMillis": 1699643415128, + "timestampMillis": 1701223119777, "partitionSpec": { "type": "FULL_TABLE", "partition": "FULL_TABLE_SNAPSHOT" @@ -720,7 +720,7 @@ "aspectName": "dataProcessInstanceRunEvent", "aspect": { "json": { - "timestampMillis": 1699643415856, + "timestampMillis": 1701223120456, "partitionSpec": { "type": "FULL_TABLE", "partition": "FULL_TABLE_SNAPSHOT" diff --git a/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v2_snowflake_operator.json b/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v2_snowflake_operator.json index 0a704ed10c911e..1bf0820c7cb41f 100644 --- a/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v2_snowflake_operator.json +++ b/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v2_snowflake_operator.json @@ -11,7 +11,7 @@ "catchup": "False", "description": "None", "doc_md": "None", - "fileloc": "'/Users/shubham/airflow1/datahub/metadata-ingestion-modules/airflow-plugin/tests/integration/dags/snowflake_operator.py'", + "fileloc": "", "is_paused_upon_creation": "None", "start_date": "DateTime(2023, 1, 1, 0, 0, 0, tzinfo=Timezone('UTC'))", "tags": "[]", @@ -226,9 +226,9 @@ "json": { "customProperties": { "run_id": "manual_run_test", - "duration": "None", - "start_date": "2023-09-30 06:55:36.844976+00:00", - "end_date": "None", + "duration": "", + "start_date": "", + "end_date": "", "execution_date": "2023-09-27 21:34:38+00:00", "try_number": "0", "max_tries": "0", @@ -245,7 +245,7 @@ "name": "snowflake_operator_transform_cost_table_manual_run_test", "type": "BATCH_AD_HOC", "created": { - "time": 1696056936844, + "time": 1701223475050, "actor": "urn:li:corpuser:datahub" } } @@ -318,7 +318,7 @@ "aspectName": "dataProcessInstanceRunEvent", "aspect": { "json": { - "timestampMillis": 1696056936844, + "timestampMillis": 1701223475050, "partitionSpec": { "type": "FULL_TABLE", "partition": "FULL_TABLE_SNAPSHOT" @@ -496,7 +496,7 @@ "aspectName": "dataProcessInstanceRunEvent", "aspect": { "json": { - "timestampMillis": 1696056938096, + "timestampMillis": 1701223476665, "partitionSpec": { "type": "FULL_TABLE", "partition": "FULL_TABLE_SNAPSHOT" diff --git 
a/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v2_sqlite_operator.json b/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v2_sqlite_operator.json index 3b4b60174f99f1..3965ee4a10ad05 100644 --- a/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v2_sqlite_operator.json +++ b/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v2_sqlite_operator.json @@ -11,7 +11,7 @@ "catchup": "False", "description": "None", "doc_md": "None", - "fileloc": "'/Users/shubham/airflow1/datahub/metadata-ingestion-modules/airflow-plugin/tests/integration/dags/sqlite_operator.py'", + "fileloc": "", "is_paused_upon_creation": "None", "start_date": "DateTime(2023, 1, 1, 0, 0, 0, tzinfo=Timezone('UTC'))", "tags": "[]", @@ -193,9 +193,9 @@ "json": { "customProperties": { "run_id": "manual_run_test", - "duration": "None", - "start_date": "2023-10-15 20:29:10.262813+00:00", - "end_date": "None", + "duration": "", + "start_date": "", + "end_date": "", "execution_date": "2023-09-27 21:34:38+00:00", "try_number": "0", "max_tries": "0", @@ -212,7 +212,7 @@ "name": "sqlite_operator_create_cost_table_manual_run_test", "type": "BATCH_AD_HOC", "created": { - "time": 1697401750262, + "time": 1701223533895, "actor": "urn:li:corpuser:datahub" } } @@ -261,7 +261,7 @@ "aspectName": "dataProcessInstanceRunEvent", "aspect": { "json": { - "timestampMillis": 1697401750262, + "timestampMillis": 1701223533895, "partitionSpec": { "type": "FULL_TABLE", "partition": "FULL_TABLE_SNAPSHOT" @@ -442,7 +442,7 @@ "aspectName": "dataProcessInstanceRunEvent", "aspect": { "json": { - "timestampMillis": 1697401750651, + "timestampMillis": 1701223534302, "partitionSpec": { "type": "FULL_TABLE", "partition": "FULL_TABLE_SNAPSHOT" @@ -557,9 +557,9 @@ "json": { "customProperties": { "run_id": "manual_run_test", - "duration": "None", - "start_date": "2023-10-15 20:29:15.013834+00:00", - "end_date": "None", + "duration": "", + "start_date": "", + "end_date": "", "execution_date": "2023-09-27 21:34:38+00:00", "try_number": "0", "max_tries": "0", @@ -576,7 +576,7 @@ "name": "sqlite_operator_populate_cost_table_manual_run_test", "type": "BATCH_AD_HOC", "created": { - "time": 1697401755013, + "time": 1701223539348, "actor": "urn:li:corpuser:datahub" } } @@ -625,7 +625,7 @@ "aspectName": "dataProcessInstanceRunEvent", "aspect": { "json": { - "timestampMillis": 1697401755013, + "timestampMillis": 1701223539348, "partitionSpec": { "type": "FULL_TABLE", "partition": "FULL_TABLE_SNAPSHOT" @@ -735,7 +735,7 @@ "aspectName": "dataProcessInstanceRunEvent", "aspect": { "json": { - "timestampMillis": 1697401755600, + "timestampMillis": 1701223540058, "partitionSpec": { "type": "FULL_TABLE", "partition": "FULL_TABLE_SNAPSHOT" @@ -920,9 +920,9 @@ "json": { "customProperties": { "run_id": "manual_run_test", - "duration": "None", - "start_date": "2023-10-15 20:29:20.216818+00:00", - "end_date": "None", + "duration": "", + "start_date": "", + "end_date": "", "execution_date": "2023-09-27 21:34:38+00:00", "try_number": "0", "max_tries": "0", @@ -939,7 +939,7 @@ "name": "sqlite_operator_transform_cost_table_manual_run_test", "type": "BATCH_AD_HOC", "created": { - "time": 1697401760216, + "time": 1701223548187, "actor": "urn:li:corpuser:datahub" } } @@ -1012,7 +1012,7 @@ "aspectName": "dataProcessInstanceRunEvent", "aspect": { "json": { - "timestampMillis": 1697401760216, + "timestampMillis": 1701223548187, "partitionSpec": { "type": "FULL_TABLE", "partition": "FULL_TABLE_SNAPSHOT" @@ -1248,7 +1248,7 @@ 
"aspectName": "dataProcessInstanceRunEvent", "aspect": { "json": { - "timestampMillis": 1697401761237, + "timestampMillis": 1701223549416, "partitionSpec": { "type": "FULL_TABLE", "partition": "FULL_TABLE_SNAPSHOT" @@ -1365,9 +1365,9 @@ "json": { "customProperties": { "run_id": "manual_run_test", - "duration": "None", - "start_date": "2023-10-15 20:29:26.243934+00:00", - "end_date": "None", + "duration": "", + "start_date": "", + "end_date": "", "execution_date": "2023-09-27 21:34:38+00:00", "try_number": "0", "max_tries": "0", @@ -1384,7 +1384,7 @@ "name": "sqlite_operator_cleanup_costs_manual_run_test", "type": "BATCH_AD_HOC", "created": { - "time": 1697401766243, + "time": 1701223557795, "actor": "urn:li:corpuser:datahub" } } @@ -1433,7 +1433,7 @@ "aspectName": "dataProcessInstanceRunEvent", "aspect": { "json": { - "timestampMillis": 1697401766243, + "timestampMillis": 1701223557795, "partitionSpec": { "type": "FULL_TABLE", "partition": "FULL_TABLE_SNAPSHOT" @@ -1545,7 +1545,7 @@ "aspectName": "dataProcessInstanceRunEvent", "aspect": { "json": { - "timestampMillis": 1697401767373, + "timestampMillis": 1701223559079, "partitionSpec": { "type": "FULL_TABLE", "partition": "FULL_TABLE_SNAPSHOT" @@ -1662,9 +1662,9 @@ "json": { "customProperties": { "run_id": "manual_run_test", - "duration": "None", - "start_date": "2023-10-15 20:29:32.075613+00:00", - "end_date": "None", + "duration": "", + "start_date": "", + "end_date": "", "execution_date": "2023-09-27 21:34:38+00:00", "try_number": "0", "max_tries": "0", @@ -1681,7 +1681,7 @@ "name": "sqlite_operator_cleanup_processed_costs_manual_run_test", "type": "BATCH_AD_HOC", "created": { - "time": 1697401772075, + "time": 1701223564459, "actor": "urn:li:corpuser:datahub" } } @@ -1730,7 +1730,7 @@ "aspectName": "dataProcessInstanceRunEvent", "aspect": { "json": { - "timestampMillis": 1697401772075, + "timestampMillis": 1701223564459, "partitionSpec": { "type": "FULL_TABLE", "partition": "FULL_TABLE_SNAPSHOT" @@ -1842,7 +1842,7 @@ "aspectName": "dataProcessInstanceRunEvent", "aspect": { "json": { - "timestampMillis": 1697401773454, + "timestampMillis": 1701223566107, "partitionSpec": { "type": "FULL_TABLE", "partition": "FULL_TABLE_SNAPSHOT" diff --git a/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v2_sqlite_operator_no_dag_listener.json b/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v2_sqlite_operator_no_dag_listener.json index 99a8aadb7fd9c1..a9f9fbac56fffc 100644 --- a/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v2_sqlite_operator_no_dag_listener.json +++ b/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v2_sqlite_operator_no_dag_listener.json @@ -11,7 +11,7 @@ "catchup": "False", "description": "None", "doc_md": "None", - "fileloc": "'/Users/hsheth/projects/datahub/metadata-ingestion-modules/airflow-plugin/tests/integration/dags/sqlite_operator.py'", + "fileloc": "", "is_paused_upon_creation": "None", "start_date": "DateTime(2023, 1, 1, 0, 0, 0, tzinfo=Timezone('UTC'))", "tags": "[]", @@ -193,9 +193,9 @@ "json": { "customProperties": { "run_id": "manual_run_test", - "duration": "None", - "start_date": "2023-11-10 19:12:17.805860+00:00", - "end_date": "None", + "duration": "", + "start_date": "", + "end_date": "", "execution_date": "2023-09-27 21:34:38+00:00", "try_number": "0", "max_tries": "0", @@ -212,7 +212,7 @@ "name": "sqlite_operator_create_cost_table_manual_run_test", "type": "BATCH_AD_HOC", "created": { - "time": 1699643537805, + "time": 
1701223251992, "actor": "urn:li:corpuser:datahub" } } @@ -261,7 +261,7 @@ "aspectName": "dataProcessInstanceRunEvent", "aspect": { "json": { - "timestampMillis": 1699643537805, + "timestampMillis": 1701223251992, "partitionSpec": { "type": "FULL_TABLE", "partition": "FULL_TABLE_SNAPSHOT" @@ -442,7 +442,7 @@ "aspectName": "dataProcessInstanceRunEvent", "aspect": { "json": { - "timestampMillis": 1699643538759, + "timestampMillis": 1701223253042, "partitionSpec": { "type": "FULL_TABLE", "partition": "FULL_TABLE_SNAPSHOT" @@ -467,7 +467,7 @@ "catchup": "False", "description": "None", "doc_md": "None", - "fileloc": "'/Users/hsheth/projects/datahub/metadata-ingestion-modules/airflow-plugin/tests/integration/dags/sqlite_operator.py'", + "fileloc": "", "is_paused_upon_creation": "None", "start_date": "DateTime(2023, 1, 1, 0, 0, 0, tzinfo=Timezone('UTC'))", "tags": "[]", @@ -614,9 +614,9 @@ "json": { "customProperties": { "run_id": "manual_run_test", - "duration": "None", - "start_date": "2023-11-10 19:12:22.560376+00:00", - "end_date": "None", + "duration": "", + "start_date": "", + "end_date": "", "execution_date": "2023-09-27 21:34:38+00:00", "try_number": "0", "max_tries": "0", @@ -633,7 +633,7 @@ "name": "sqlite_operator_populate_cost_table_manual_run_test", "type": "BATCH_AD_HOC", "created": { - "time": 1699643542560, + "time": 1701223258947, "actor": "urn:li:corpuser:datahub" } } @@ -682,7 +682,7 @@ "aspectName": "dataProcessInstanceRunEvent", "aspect": { "json": { - "timestampMillis": 1699643542560, + "timestampMillis": 1701223258947, "partitionSpec": { "type": "FULL_TABLE", "partition": "FULL_TABLE_SNAPSHOT" @@ -792,7 +792,7 @@ "aspectName": "dataProcessInstanceRunEvent", "aspect": { "json": { - "timestampMillis": 1699643543925, + "timestampMillis": 1701223260414, "partitionSpec": { "type": "FULL_TABLE", "partition": "FULL_TABLE_SNAPSHOT" @@ -817,7 +817,7 @@ "catchup": "False", "description": "None", "doc_md": "None", - "fileloc": "'/Users/hsheth/projects/datahub/metadata-ingestion-modules/airflow-plugin/tests/integration/dags/sqlite_operator.py'", + "fileloc": "", "is_paused_upon_creation": "None", "start_date": "DateTime(2023, 1, 1, 0, 0, 0, tzinfo=Timezone('UTC'))", "tags": "[]", @@ -1034,9 +1034,9 @@ "json": { "customProperties": { "run_id": "manual_run_test", - "duration": "None", - "start_date": "2023-11-10 19:12:29.429032+00:00", - "end_date": "None", + "duration": "", + "start_date": "", + "end_date": "", "execution_date": "2023-09-27 21:34:38+00:00", "try_number": "0", "max_tries": "0", @@ -1053,7 +1053,7 @@ "name": "sqlite_operator_transform_cost_table_manual_run_test", "type": "BATCH_AD_HOC", "created": { - "time": 1699643549429, + "time": 1701223266595, "actor": "urn:li:corpuser:datahub" } } @@ -1126,7 +1126,7 @@ "aspectName": "dataProcessInstanceRunEvent", "aspect": { "json": { - "timestampMillis": 1699643549429, + "timestampMillis": 1701223266595, "partitionSpec": { "type": "FULL_TABLE", "partition": "FULL_TABLE_SNAPSHOT" @@ -1362,7 +1362,7 @@ "aspectName": "dataProcessInstanceRunEvent", "aspect": { "json": { - "timestampMillis": 1699643551423, + "timestampMillis": 1701223268728, "partitionSpec": { "type": "FULL_TABLE", "partition": "FULL_TABLE_SNAPSHOT" @@ -1387,7 +1387,7 @@ "catchup": "False", "description": "None", "doc_md": "None", - "fileloc": "'/Users/hsheth/projects/datahub/metadata-ingestion-modules/airflow-plugin/tests/integration/dags/sqlite_operator.py'", + "fileloc": "", "is_paused_upon_creation": "None", "start_date": "DateTime(2023, 1, 1, 0, 0, 0, 
tzinfo=Timezone('UTC'))", "tags": "[]", @@ -1536,9 +1536,9 @@ "json": { "customProperties": { "run_id": "manual_run_test", - "duration": "None", - "start_date": "2023-11-10 19:12:37.423556+00:00", - "end_date": "None", + "duration": "", + "start_date": "", + "end_date": "", "execution_date": "2023-09-27 21:34:38+00:00", "try_number": "0", "max_tries": "0", @@ -1555,7 +1555,7 @@ "name": "sqlite_operator_cleanup_costs_manual_run_test", "type": "BATCH_AD_HOC", "created": { - "time": 1699643557423, + "time": 1701223275045, "actor": "urn:li:corpuser:datahub" } } @@ -1604,7 +1604,7 @@ "aspectName": "dataProcessInstanceRunEvent", "aspect": { "json": { - "timestampMillis": 1699643557423, + "timestampMillis": 1701223275045, "partitionSpec": { "type": "FULL_TABLE", "partition": "FULL_TABLE_SNAPSHOT" @@ -1716,7 +1716,7 @@ "aspectName": "dataProcessInstanceRunEvent", "aspect": { "json": { - "timestampMillis": 1699643559607, + "timestampMillis": 1701223277378, "partitionSpec": { "type": "FULL_TABLE", "partition": "FULL_TABLE_SNAPSHOT" @@ -1741,7 +1741,7 @@ "catchup": "False", "description": "None", "doc_md": "None", - "fileloc": "'/Users/hsheth/projects/datahub/metadata-ingestion-modules/airflow-plugin/tests/integration/dags/sqlite_operator.py'", + "fileloc": "", "is_paused_upon_creation": "None", "start_date": "DateTime(2023, 1, 1, 0, 0, 0, tzinfo=Timezone('UTC'))", "tags": "[]", @@ -1890,9 +1890,9 @@ "json": { "customProperties": { "run_id": "manual_run_test", - "duration": "None", - "start_date": "2023-11-10 19:12:43.792375+00:00", - "end_date": "None", + "duration": "", + "start_date": "", + "end_date": "", "execution_date": "2023-09-27 21:34:38+00:00", "try_number": "0", "max_tries": "0", @@ -1909,7 +1909,7 @@ "name": "sqlite_operator_cleanup_processed_costs_manual_run_test", "type": "BATCH_AD_HOC", "created": { - "time": 1699643563792, + "time": 1701223282010, "actor": "urn:li:corpuser:datahub" } } @@ -1958,7 +1958,7 @@ "aspectName": "dataProcessInstanceRunEvent", "aspect": { "json": { - "timestampMillis": 1699643563792, + "timestampMillis": 1701223282010, "partitionSpec": { "type": "FULL_TABLE", "partition": "FULL_TABLE_SNAPSHOT" @@ -2070,7 +2070,7 @@ "aspectName": "dataProcessInstanceRunEvent", "aspect": { "json": { - "timestampMillis": 1699643566350, + "timestampMillis": 1701223284766, "partitionSpec": { "type": "FULL_TABLE", "partition": "FULL_TABLE_SNAPSHOT" diff --git a/metadata-ingestion-modules/airflow-plugin/tests/integration/test_plugin.py b/metadata-ingestion-modules/airflow-plugin/tests/integration/test_plugin.py index a2b7fd151a1e41..0c5d11f693eef2 100644 --- a/metadata-ingestion-modules/airflow-plugin/tests/integration/test_plugin.py +++ b/metadata-ingestion-modules/airflow-plugin/tests/integration/test_plugin.py @@ -1,6 +1,7 @@ import contextlib import dataclasses import functools +import json import logging import os import pathlib @@ -8,12 +9,13 @@ import signal import subprocess import time -from typing import Iterator, Sequence +from typing import Any, Iterator, Sequence import pytest import requests import tenacity from airflow.models.connection import Connection +from datahub.ingestion.sink.file import write_metadata_file from datahub.testing.compare_metadata_json import assert_metadata_files_equal from datahub_airflow_plugin._airflow_shims import ( @@ -358,26 +360,58 @@ def test_airflow_plugin( print("Sleeping for a few seconds to let the plugin finish...") time.sleep(10) + _sanitize_output_file(airflow_instance.metadata_file) + check_golden_file( pytestconfig=pytestconfig, 
output_path=airflow_instance.metadata_file, golden_path=golden_path, ignore_paths=[ # Timing-related items. - r"root\[\d+\]\['aspect'\]\['json'\]\['customProperties'\]\['start_date'\]", - r"root\[\d+\]\['aspect'\]\['json'\]\['customProperties'\]\['end_date'\]", - r"root\[\d+\]\['aspect'\]\['json'\]\['customProperties'\]\['duration'\]", - # Host-specific items. - r"root\[\d+\]\['aspect'\]\['json'\]\['customProperties'\]\['pid'\]", - r"root\[\d+\]\['aspect'\]\['json'\]\['customProperties'\]\['hostname'\]", - r"root\[\d+\]\['aspect'\]\['json'\]\['customProperties'\]\['unixname'\]", + # r"root\[\d+\]\['aspect'\]\['json'\]\['customProperties'\]\['start_date'\]", + # r"root\[\d+\]\['aspect'\]\['json'\]\['customProperties'\]\['end_date'\]", + # r"root\[\d+\]\['aspect'\]\['json'\]\['customProperties'\]\['duration'\]", # TODO: If we switched to Git urls, maybe we could get this to work consistently. - r"root\[\d+\]\['aspect'\]\['json'\]\['customProperties'\]\['fileloc'\]", + # r"root\[\d+\]\['aspect'\]\['json'\]\['customProperties'\]\['fileloc'\]", r"root\[\d+\]\['aspect'\]\['json'\]\['customProperties'\]\['openlineage_.*'\]", ], ) +def _sanitize_output_file(output_path: pathlib.Path) -> None: + # Overwrite some custom properties in the output file to make it easier to compare. + + props_job = { + "fileloc": "", + } + props_process = { + "start_date": "", + "end_date": "", + "duration": "", + } + + def _sanitize(obj: Any) -> None: + if isinstance(obj, dict) and "customProperties" in obj: + replacement_props = ( + props_process if "run_id" in obj["customProperties"] else props_job + ) + obj["customProperties"] = { + k: replacement_props.get(k, v) + for k, v in obj["customProperties"].items() + } + elif isinstance(obj, dict): + for v in obj.values(): + _sanitize(v) + elif isinstance(obj, list): + for v in obj: + _sanitize(v) + + objs = json.loads(output_path.read_text()) + _sanitize(objs) + + write_metadata_file(output_path, objs) + + if __name__ == "__main__": # When run directly, just set up a local airflow instance. import tempfile diff --git a/metadata-ingestion/src/datahub/api/entities/datajob/datajob.py b/metadata-ingestion/src/datahub/api/entities/datajob/datajob.py index 6c42e830e223b1..1ec74b94179d5c 100644 --- a/metadata-ingestion/src/datahub/api/entities/datajob/datajob.py +++ b/metadata-ingestion/src/datahub/api/entities/datajob/datajob.py @@ -40,7 +40,8 @@ class DataJob: group_owners Set[str]): A list of group ids that own this job. 
inlets (List[str]): List of urns the DataProcessInstance consumes outlets (List[str]): List of urns the DataProcessInstance produces - input_datajob_urns: List[DataJobUrn] = field(default_factory=list) + fine_grained_lineages: Column lineage for the inlets and outlets + upstream_urns: List[DataJobUrn] = field(default_factory=list) """ id: str From f9fd9467ef14cd5b39cac4c71e214d9088f0f9a1 Mon Sep 17 00:00:00 2001 From: Harshal Sheth Date: Thu, 30 Nov 2023 21:00:43 -0500 Subject: [PATCH 06/23] feat(ingest): clean up DataHubRestEmitter return type (#9286) Co-authored-by: Andrew Sikowitz --- .../config/HomePageOnboardingConfig.tsx | 3 +-- docs/how/updating-datahub.md | 1 + .../datahub_airflow_plugin/hooks/datahub.py | 23 ++++++++++++++----- .../airflow-plugin/tests/unit/test_airflow.py | 8 +++---- .../src/datahub/emitter/generic_emitter.py | 7 ++---- .../src/datahub/emitter/rest_emitter.py | 19 ++++++++------- .../datahub/ingestion/sink/datahub_rest.py | 20 ++++++++++++---- .../tests/test_helpers/graph_helpers.py | 12 +++++----- .../tests/unit/test_rest_emitter.py | 6 +++++ 9 files changed, 62 insertions(+), 37 deletions(-) diff --git a/datahub-web-react/src/app/onboarding/config/HomePageOnboardingConfig.tsx b/datahub-web-react/src/app/onboarding/config/HomePageOnboardingConfig.tsx index 28a0465a1b2f74..8b361db5ab344c 100644 --- a/datahub-web-react/src/app/onboarding/config/HomePageOnboardingConfig.tsx +++ b/datahub-web-react/src/app/onboarding/config/HomePageOnboardingConfig.tsx @@ -94,8 +94,7 @@ export const HomePageOnboardingConfig: OnboardingStep[] = [ Here are your organization's Data Platforms. Data Platforms represent specific third-party Data Systems or Tools. Examples include Data Warehouses like Snowflake, - Orchestrators like - Airflow, and Dashboarding tools like Looker. + Orchestrators like Airflow, and Dashboarding tools like Looker. ), }, diff --git a/docs/how/updating-datahub.md b/docs/how/updating-datahub.md index dad05fd0153f24..df179b0d0d2f7d 100644 --- a/docs/how/updating-datahub.md +++ b/docs/how/updating-datahub.md @@ -11,6 +11,7 @@ This file documents any backwards-incompatible changes in DataHub and assists pe - `database_alias` config is no longer supported in SQL sources namely - Redshift, MySQL, Oracle, Postgres, Trino, Presto-on-hive. The config will automatically be ignored if it's present in your recipe. It has been deprecated since v0.9.6. - #9257: The Python SDK urn types are now autogenerated. The new classes are largely backwards compatible with the previous, manually written classes, but many older methods are now deprecated in favor of a more uniform interface. The only breaking change is that the signature for the director constructor e.g. `TagUrn("tag", ["tag_name"])` is no longer supported, and the simpler `TagUrn("tag_name")` should be used instead. The canonical place to import the urn classes from is `datahub.metadata.urns.*`. Other import paths, like `datahub.utilities.urns.corpuser_urn.CorpuserUrn` are retained for backwards compatibility, but are considered deprecated. +- #9286: The `DataHubRestEmitter.emit` method no longer returns anything. It previously returned a tuple of timestamps. 
### Potential Downtime diff --git a/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/hooks/datahub.py b/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/hooks/datahub.py index 9604931795ccb9..b60f20c5bf8b28 100644 --- a/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/hooks/datahub.py +++ b/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/hooks/datahub.py @@ -12,7 +12,7 @@ if TYPE_CHECKING: from airflow.models.connection import Connection from datahub.emitter.kafka_emitter import DatahubKafkaEmitter - from datahub.emitter.rest_emitter import DatahubRestEmitter + from datahub.emitter.rest_emitter import DataHubRestEmitter from datahub.emitter.synchronized_file_emitter import SynchronizedFileEmitter from datahub.ingestion.sink.datahub_kafka import KafkaSinkConfig @@ -63,6 +63,13 @@ def test_connection(self) -> Tuple[bool, str]: return True, "Successfully connected to DataHub." def _get_config(self) -> Tuple[str, Optional[str], Optional[int]]: + # We have a few places in the codebase that use this method directly, despite + # it being "private". For now, we retain backwards compatibility by keeping + # this method around, but should stop using it in the future. + host, token, extra_args = self._get_config_v2() + return host, token, extra_args.get("timeout_sec") + + def _get_config_v2(self) -> Tuple[str, Optional[str], Dict]: conn: "Connection" = self.get_connection(self.datahub_rest_conn_id) host = conn.host @@ -74,14 +81,18 @@ def _get_config(self) -> Tuple[str, Optional[str], Optional[int]]: "host parameter should not contain a port number if the port is specified separately" ) host = f"{host}:{conn.port}" - password = conn.password - timeout_sec = conn.extra_dejson.get("timeout_sec") - return (host, password, timeout_sec) + token = conn.password + + extra_args = conn.extra_dejson + return (host, token, extra_args) - def make_emitter(self) -> "DatahubRestEmitter": + def make_emitter(self) -> "DataHubRestEmitter": import datahub.emitter.rest_emitter - return datahub.emitter.rest_emitter.DatahubRestEmitter(*self._get_config()) + host, token, extra_args = self._get_config_v2() + return datahub.emitter.rest_emitter.DataHubRestEmitter( + host, token, **extra_args + ) def emit( self, diff --git a/metadata-ingestion-modules/airflow-plugin/tests/unit/test_airflow.py b/metadata-ingestion-modules/airflow-plugin/tests/unit/test_airflow.py index 7fbf7079959942..93b4af0501985e 100644 --- a/metadata-ingestion-modules/airflow-plugin/tests/unit/test_airflow.py +++ b/metadata-ingestion-modules/airflow-plugin/tests/unit/test_airflow.py @@ -99,19 +99,19 @@ def patch_airflow_connection(conn: Connection) -> Iterator[Connection]: yield conn -@mock.patch("datahub.emitter.rest_emitter.DatahubRestEmitter", autospec=True) +@mock.patch("datahub.emitter.rest_emitter.DataHubRestEmitter", autospec=True) def test_datahub_rest_hook(mock_emitter): with patch_airflow_connection(datahub_rest_connection_config) as config: assert config.conn_id hook = DatahubRestHook(config.conn_id) hook.emit_mces([lineage_mce]) - mock_emitter.assert_called_once_with(config.host, None, None) + mock_emitter.assert_called_once_with(config.host, None) instance = mock_emitter.return_value instance.emit.assert_called_with(lineage_mce) -@mock.patch("datahub.emitter.rest_emitter.DatahubRestEmitter", autospec=True) +@mock.patch("datahub.emitter.rest_emitter.DataHubRestEmitter", autospec=True) def test_datahub_rest_hook_with_timeout(mock_emitter): with 
patch_airflow_connection( datahub_rest_connection_config_with_timeout @@ -120,7 +120,7 @@ def test_datahub_rest_hook_with_timeout(mock_emitter): hook = DatahubRestHook(config.conn_id) hook.emit_mces([lineage_mce]) - mock_emitter.assert_called_once_with(config.host, None, 5) + mock_emitter.assert_called_once_with(config.host, None, timeout_sec=5) instance = mock_emitter.return_value instance.emit.assert_called_with(lineage_mce) diff --git a/metadata-ingestion/src/datahub/emitter/generic_emitter.py b/metadata-ingestion/src/datahub/emitter/generic_emitter.py index 28138c61827583..54b3d6841fe9c6 100644 --- a/metadata-ingestion/src/datahub/emitter/generic_emitter.py +++ b/metadata-ingestion/src/datahub/emitter/generic_emitter.py @@ -1,4 +1,4 @@ -from typing import Any, Callable, Optional, Union +from typing import Callable, Optional, Union from typing_extensions import Protocol @@ -21,10 +21,7 @@ def emit( # required. However, this would be a breaking change that may need # more careful consideration. callback: Optional[Callable[[Exception, str], None]] = None, - # TODO: The rest emitter returns timestamps as the return type. For now - # we smooth over that detail using Any, but eventually we should - # standardize on a return type. - ) -> Any: + ) -> None: raise NotImplementedError def flush(self) -> None: diff --git a/metadata-ingestion/src/datahub/emitter/rest_emitter.py b/metadata-ingestion/src/datahub/emitter/rest_emitter.py index afb19df9791af3..4598c7faa21058 100644 --- a/metadata-ingestion/src/datahub/emitter/rest_emitter.py +++ b/metadata-ingestion/src/datahub/emitter/rest_emitter.py @@ -1,10 +1,9 @@ -import datetime import functools import json import logging import os from json.decoder import JSONDecodeError -from typing import TYPE_CHECKING, Any, Callable, Dict, List, Optional, Tuple, Union +from typing import TYPE_CHECKING, Any, Callable, Dict, List, Optional, Union import requests from deprecated import deprecated @@ -60,6 +59,7 @@ def __init__( self, gms_server: str, token: Optional[str] = None, + timeout_sec: Optional[float] = None, connect_timeout_sec: Optional[float] = None, read_timeout_sec: Optional[float] = None, retry_status_codes: Optional[List[int]] = None, @@ -103,11 +103,12 @@ def __init__( if disable_ssl_verification: self._session.verify = False - if connect_timeout_sec: - self._connect_timeout_sec = connect_timeout_sec - - if read_timeout_sec: - self._read_timeout_sec = read_timeout_sec + self._connect_timeout_sec = ( + connect_timeout_sec or timeout_sec or _DEFAULT_CONNECT_TIMEOUT_SEC + ) + self._read_timeout_sec = ( + read_timeout_sec or timeout_sec or _DEFAULT_READ_TIMEOUT_SEC + ) if self._connect_timeout_sec < 1 or self._read_timeout_sec < 1: logger.warning( @@ -208,8 +209,7 @@ def emit( UsageAggregation, ], callback: Optional[Callable[[Exception, str], None]] = None, - ) -> Tuple[datetime.datetime, datetime.datetime]: - start_time = datetime.datetime.now() + ) -> None: try: if isinstance(item, UsageAggregation): self.emit_usage(item) @@ -226,7 +226,6 @@ def emit( else: if callback: callback(None, "success") # type: ignore - return start_time, datetime.datetime.now() def emit_mce(self, mce: MetadataChangeEvent) -> None: url = f"{self._gms_server}/entities?action=ingest" diff --git a/metadata-ingestion/src/datahub/ingestion/sink/datahub_rest.py b/metadata-ingestion/src/datahub/ingestion/sink/datahub_rest.py index d3abde0d36993a..fedd8520dde4d4 100644 --- a/metadata-ingestion/src/datahub/ingestion/sink/datahub_rest.py +++ 
b/metadata-ingestion/src/datahub/ingestion/sink/datahub_rest.py @@ -4,10 +4,10 @@ import logging from concurrent.futures import ThreadPoolExecutor from dataclasses import dataclass -from datetime import timedelta +from datetime import datetime, timedelta from enum import auto from threading import BoundedSemaphore -from typing import Union +from typing import Tuple, Union from datahub.cli.cli_utils import set_env_variables_override_config from datahub.configuration.common import ( @@ -181,6 +181,18 @@ def _write_done_callback( self.report.report_failure({"e": e}) write_callback.on_failure(record_envelope, Exception(e), {}) + def _emit_wrapper( + self, + record: Union[ + MetadataChangeEvent, + MetadataChangeProposal, + MetadataChangeProposalWrapper, + ], + ) -> Tuple[datetime, datetime]: + start_time = datetime.now() + self.emitter.emit(record) + return start_time, datetime.now() + def write_record_async( self, record_envelope: RecordEnvelope[ @@ -194,7 +206,7 @@ def write_record_async( ) -> None: record = record_envelope.record if self.config.mode == SyncOrAsync.ASYNC: - write_future = self.executor.submit(self.emitter.emit, record) + write_future = self.executor.submit(self._emit_wrapper, record) write_future.add_done_callback( functools.partial( self._write_done_callback, record_envelope, write_callback @@ -204,7 +216,7 @@ def write_record_async( else: # execute synchronously try: - (start, end) = self.emitter.emit(record) + (start, end) = self._emit_wrapper(record) write_callback.on_success(record_envelope, success_metadata={}) except Exception as e: write_callback.on_failure(record_envelope, e, failure_metadata={}) diff --git a/metadata-ingestion/tests/test_helpers/graph_helpers.py b/metadata-ingestion/tests/test_helpers/graph_helpers.py index 4c2c46c2f97ced..2e73f5e2c6cdb8 100644 --- a/metadata-ingestion/tests/test_helpers/graph_helpers.py +++ b/metadata-ingestion/tests/test_helpers/graph_helpers.py @@ -1,6 +1,5 @@ -from datetime import datetime from pathlib import Path -from typing import Any, Callable, Dict, Iterable, List, Optional, Tuple, Type, Union +from typing import Any, Callable, Dict, Iterable, List, Optional, Type, Union from datahub.emitter.mce_builder import Aspect from datahub.emitter.mcp import MetadataChangeProposalWrapper @@ -22,7 +21,9 @@ class MockDataHubGraph(DataHubGraph): - def __init__(self, entity_graph: Dict[str, Dict[str, Any]] = {}) -> None: + def __init__( + self, entity_graph: Optional[Dict[str, Dict[str, Any]]] = None + ) -> None: self.emitted: List[ Union[ MetadataChangeEvent, @@ -30,7 +31,7 @@ def __init__(self, entity_graph: Dict[str, Dict[str, Any]] = {}) -> None: MetadataChangeProposalWrapper, ] ] = [] - self.entity_graph = entity_graph + self.entity_graph = entity_graph or {} def import_file(self, file: Path) -> None: """Imports metadata from any MCE/MCP file. Does not clear prior loaded data. 
@@ -110,9 +111,8 @@ def emit( UsageAggregationClass, ], callback: Union[Callable[[Exception, str], None], None] = None, - ) -> Tuple[datetime, datetime]: + ) -> None: self.emitted.append(item) # type: ignore - return (datetime.now(), datetime.now()) def emit_mce(self, mce: MetadataChangeEvent) -> None: self.emitted.append(mce) diff --git a/metadata-ingestion/tests/unit/test_rest_emitter.py b/metadata-ingestion/tests/unit/test_rest_emitter.py index e56cbd2c41c6b0..b4d7cb17b66f5c 100644 --- a/metadata-ingestion/tests/unit/test_rest_emitter.py +++ b/metadata-ingestion/tests/unit/test_rest_emitter.py @@ -20,6 +20,12 @@ def test_datahub_rest_emitter_timeout_construction(): assert emitter._read_timeout_sec == 4 +def test_datahub_rest_emitter_general_timeout_construction(): + emitter = DatahubRestEmitter(MOCK_GMS_ENDPOINT, timeout_sec=2, read_timeout_sec=4) + assert emitter._connect_timeout_sec == 2 + assert emitter._read_timeout_sec == 4 + + def test_datahub_rest_emitter_retry_construction(): emitter = DatahubRestEmitter( MOCK_GMS_ENDPOINT, From 4d9eb12cba3a36ca30a7b07fea9aeb6a13443522 Mon Sep 17 00:00:00 2001 From: Harshal Sheth Date: Fri, 1 Dec 2023 04:03:10 -0500 Subject: [PATCH 07/23] feat(ingest/dbt): support custom ownership types in dbt meta (#9332) --- metadata-ingestion/docs/sources/dbt/dbt.md | 2 +- .../src/datahub/utilities/mapping.py | 10 ++++++++- metadata-ingestion/tests/unit/test_mapping.py | 22 +++++++++++++++++-- 3 files changed, 30 insertions(+), 4 deletions(-) diff --git a/metadata-ingestion/docs/sources/dbt/dbt.md b/metadata-ingestion/docs/sources/dbt/dbt.md index 43ced13c3b1f8d..6cc8772871c2f2 100644 --- a/metadata-ingestion/docs/sources/dbt/dbt.md +++ b/metadata-ingestion/docs/sources/dbt/dbt.md @@ -62,7 +62,7 @@ We support the following operations: 1. add_tag - Requires `tag` property in config. 2. add_term - Requires `term` property in config. 3. add_terms - Accepts an optional `separator` property in config. -4. add_owner - Requires `owner_type` property in config which can be either user or group. Optionally accepts the `owner_category` config property which you can set to one of `['TECHNICAL_OWNER', 'BUSINESS_OWNER', 'DATA_STEWARD', 'DATAOWNER'` (defaults to `DATAOWNER`). +4. add_owner - Requires `owner_type` property in config which can be either user or group. Optionally accepts the `owner_category` config property which can be set to either a [custom ownership type](../../../../docs/ownership/ownership-types.md) urn like `urn:li:ownershipType:architect` or one of `['TECHNICAL_OWNER', 'BUSINESS_OWNER', 'DATA_STEWARD', 'DATAOWNER'` (defaults to `DATAOWNER`). 5. add_doc_link - Requires `link` and `description` properties in config. Upon ingestion run, this will overwrite current links in the institutional knowledge section with this new link. The anchor text is defined here in the meta_mappings as `description`. 
Note: diff --git a/metadata-ingestion/src/datahub/utilities/mapping.py b/metadata-ingestion/src/datahub/utilities/mapping.py index f91c01d901ac1e..00f7d370d16765 100644 --- a/metadata-ingestion/src/datahub/utilities/mapping.py +++ b/metadata-ingestion/src/datahub/utilities/mapping.py @@ -191,6 +191,7 @@ def convert_to_aspects( OwnerClass( owner=x.get("urn"), type=x.get("category"), + typeUrn=x.get("categoryUrn"), source=OwnershipSourceClass(type=self.owner_source_type) if self.owner_source_type else None, @@ -281,18 +282,25 @@ def get_operation_value( operation_config.get(Constants.OWNER_CATEGORY) or OwnershipTypeClass.DATAOWNER ) - owner_category = owner_category.upper() + owner_category_urn = None + if owner_category.startswith("urn:li:"): + owner_category_urn = owner_category + owner_category = OwnershipTypeClass.DATAOWNER + else: + owner_category = owner_category.upper() if self.strip_owner_email_id: owner_id = self.sanitize_owner_ids(owner_id) if operation_config[Constants.OWNER_TYPE] == Constants.USER_OWNER: return { "urn": mce_builder.make_owner_urn(owner_id, OwnerType.USER), "category": owner_category, + "categoryUrn": owner_category_urn, } elif operation_config[Constants.OWNER_TYPE] == Constants.GROUP_OWNER: return { "urn": mce_builder.make_owner_urn(owner_id, OwnerType.GROUP), "category": owner_category, + "categoryUrn": owner_category_urn, } elif ( operation_type == Constants.ADD_TERM_OPERATION diff --git a/metadata-ingestion/tests/unit/test_mapping.py b/metadata-ingestion/tests/unit/test_mapping.py index 5c258f16535f88..de35451c9ec4b5 100644 --- a/metadata-ingestion/tests/unit/test_mapping.py +++ b/metadata-ingestion/tests/unit/test_mapping.py @@ -174,7 +174,11 @@ def test_operation_processor_advanced_matching_owners(): def test_operation_processor_ownership_category(): - raw_props = {"user_owner": "@test_user", "business_owner": "alice"} + raw_props = { + "user_owner": "@test_user", + "business_owner": "alice", + "architect": "bob", + } processor = OperationProcessor( operation_defs={ "user_owner": { @@ -193,6 +197,14 @@ def test_operation_processor_ownership_category(): "owner_category": OwnershipTypeClass.BUSINESS_OWNER, }, }, + "architect": { + "match": ".*", + "operation": "add_owner", + "config": { + "owner_type": "user", + "owner_category": "urn:li:ownershipType:architect", + }, + }, }, owner_source_type="SOURCE_CONTROL", ) @@ -200,7 +212,7 @@ def test_operation_processor_ownership_category(): assert "add_owner" in aspect_map ownership_aspect: OwnershipClass = aspect_map["add_owner"] - assert len(ownership_aspect.owners) == 2 + assert len(ownership_aspect.owners) == 3 new_owner: OwnerClass = ownership_aspect.owners[0] assert new_owner.owner == "urn:li:corpGroup:test_user" assert new_owner.source and new_owner.source.type == "SOURCE_CONTROL" @@ -211,6 +223,12 @@ def test_operation_processor_ownership_category(): assert new_owner.source and new_owner.source.type == "SOURCE_CONTROL" assert new_owner.type and new_owner.type == OwnershipTypeClass.BUSINESS_OWNER + new_owner = ownership_aspect.owners[2] + assert new_owner.owner == "urn:li:corpuser:bob" + assert new_owner.source and new_owner.source.type == "SOURCE_CONTROL" + assert new_owner.type == OwnershipTypeClass.DATAOWNER # dummy value + assert new_owner.typeUrn == "urn:li:ownershipType:architect" + def test_operation_processor_advanced_matching_tags(): raw_props = { From 82f375ded6c98160ad9edbe6488cbc16b2a01d22 Mon Sep 17 00:00:00 2001 From: Harshal Sheth Date: Fri, 1 Dec 2023 04:03:20 -0500 Subject: [PATCH 08/23] 
docs(ingest/lookml): clarify that ssh key has no passphrase (#9348) --- docs/quick-ingestion-guides/looker/setup.md | 3 ++- metadata-ingestion/docs/sources/looker/lookml_pre.md | 2 +- metadata-ingestion/src/datahub/configuration/git.py | 4 +++- 3 files changed, 6 insertions(+), 3 deletions(-) diff --git a/docs/quick-ingestion-guides/looker/setup.md b/docs/quick-ingestion-guides/looker/setup.md index c08de116895ea5..81c2c9e4ba08c0 100644 --- a/docs/quick-ingestion-guides/looker/setup.md +++ b/docs/quick-ingestion-guides/looker/setup.md @@ -129,7 +129,8 @@ Follow the below steps to create the GitHub Deploy Key. ### Generate a private-public SSH key pair ```bash - ssh-keygen -t rsa -f looker_datahub_deploy_key +ssh-keygen -t rsa -f looker_datahub_deploy_key +# If prompted, don't add a passphrase to the key ``` This will typically generate two files like the one below. diff --git a/metadata-ingestion/docs/sources/looker/lookml_pre.md b/metadata-ingestion/docs/sources/looker/lookml_pre.md index d78a30fe6ec372..68a4828a5ce2ae 100644 --- a/metadata-ingestion/docs/sources/looker/lookml_pre.md +++ b/metadata-ingestion/docs/sources/looker/lookml_pre.md @@ -6,7 +6,7 @@ To use LookML ingestion through the UI, or automate github checkout through the In a nutshell, there are three steps: -1. Generate a private-public ssh key pair. This will typically generate two files, e.g. looker_datahub_deploy_key (this is the private key) and looker_datahub_deploy_key.pub (this is the public key) +1. Generate a private-public ssh key pair. This will typically generate two files, e.g. looker_datahub_deploy_key (this is the private key) and looker_datahub_deploy_key.pub (this is the public key). Do not add a passphrase. ![Image](https://raw.githubusercontent.com/datahub-project/static-assets/main/imgs/gitssh/ssh-key-generation.png) 2. Add the public key to your Looker git repo as a deploy key with read access (no need to provision write access). Follow the guide [here](https://docs.github.com/en/developers/overview/managing-deploy-keys#deploy-keys) for that. diff --git a/metadata-ingestion/src/datahub/configuration/git.py b/metadata-ingestion/src/datahub/configuration/git.py index 0c7d64d4aafcf2..80eb41c100b103 100644 --- a/metadata-ingestion/src/datahub/configuration/git.py +++ b/metadata-ingestion/src/datahub/configuration/git.py @@ -77,7 +77,9 @@ class GitInfo(GitReference): deploy_key_file: Optional[FilePath] = Field( None, - description="A private key file that contains an ssh key that has been configured as a deploy key for this repository. Use a file where possible, else see deploy_key for a config field that accepts a raw string.", + description="A private key file that contains an ssh key that has been configured as a deploy key for this repository. " + "Use a file where possible, else see deploy_key for a config field that accepts a raw string. 
" + "We expect the key not have a passphrase.", ) deploy_key: Optional[SecretStr] = Field( None, From 3142efcad5a06c06d5546b05b7f259c1eba109c5 Mon Sep 17 00:00:00 2001 From: Aseem Bansal Date: Fri, 1 Dec 2023 14:55:26 +0530 Subject: [PATCH 09/23] fix(migrate): connect with token without dry-run (#9317) --- metadata-ingestion/src/datahub/cli/migrate.py | 10 ++-------- 1 file changed, 2 insertions(+), 8 deletions(-) diff --git a/metadata-ingestion/src/datahub/cli/migrate.py b/metadata-ingestion/src/datahub/cli/migrate.py index e83a8ed8feaad4..30f82987a6b650 100644 --- a/metadata-ingestion/src/datahub/cli/migrate.py +++ b/metadata-ingestion/src/datahub/cli/migrate.py @@ -23,7 +23,7 @@ SchemaKey, ) from datahub.emitter.rest_emitter import DatahubRestEmitter -from datahub.ingestion.graph.client import DataHubGraph, DataHubGraphConfig +from datahub.ingestion.graph.client import DataHubGraph, get_default_graph from datahub.metadata.schema_classes import ( ContainerKeyClass, ContainerPropertiesClass, @@ -141,13 +141,7 @@ def dataplatform2instance_func( migration_report = MigrationReport(run_id, dry_run, keep) system_metadata = SystemMetadataClass(runId=run_id) - # initialize for dry-run - graph = DataHubGraph(config=DataHubGraphConfig(server="127.0.0.1")) - - if not dry_run: - graph = DataHubGraph( - config=DataHubGraphConfig(server=cli_utils.get_session_and_host()[1]) - ) + graph = get_default_graph() urns_to_migrate = [] From 864d3dfa16b6abbb09361f52112dbb4b95bf6775 Mon Sep 17 00:00:00 2001 From: John Joyce Date: Fri, 1 Dec 2023 09:18:07 -0800 Subject: [PATCH 10/23] fix(ui): Minor: fix unnecessary lineage tab scroll by removing -1 margin on lists (#9364) --- .../src/app/entity/shared/tabs/Entity/components/EntityList.tsx | 1 - .../app/recommendations/renderer/component/EntityNameList.tsx | 1 - 2 files changed, 2 deletions(-) diff --git a/datahub-web-react/src/app/entity/shared/tabs/Entity/components/EntityList.tsx b/datahub-web-react/src/app/entity/shared/tabs/Entity/components/EntityList.tsx index 758b070864a9af..3a9061fd97d6e7 100644 --- a/datahub-web-react/src/app/entity/shared/tabs/Entity/components/EntityList.tsx +++ b/datahub-web-react/src/app/entity/shared/tabs/Entity/components/EntityList.tsx @@ -8,7 +8,6 @@ import { EntityType } from '../../../../../../types.generated'; const StyledList = styled(List)` padding-left: 40px; padding-right: 40px; - margin-top: -1px; .ant-list-items > .ant-list-item { padding-right: 0px; padding-left: 0px; diff --git a/datahub-web-react/src/app/recommendations/renderer/component/EntityNameList.tsx b/datahub-web-react/src/app/recommendations/renderer/component/EntityNameList.tsx index 4ff78e64625b14..9e8454ae223170 100644 --- a/datahub-web-react/src/app/recommendations/renderer/component/EntityNameList.tsx +++ b/datahub-web-react/src/app/recommendations/renderer/component/EntityNameList.tsx @@ -11,7 +11,6 @@ import { capitalizeFirstLetterOnly } from '../../../shared/textUtil'; export const StyledList = styled(List)` overflow-y: auto; height: 100%; - margin-top: -1px; box-shadow: ${(props) => props.theme.styles['box-shadow']}; flex: 1; .ant-list-items > .ant-list-item { From 36c7813f89b1f20898e07f24c5f209f5c57947d7 Mon Sep 17 00:00:00 2001 From: kushagra-apptware <81357546+kushagra-apptware@users.noreply.github.com> Date: Fri, 1 Dec 2023 23:18:39 +0530 Subject: [PATCH 11/23] feat(ui): Support dynamic entity profile tab names (#9352) --- .../app/entity/shared/containers/profile/EntityProfile.tsx | 1 + .../entity/shared/containers/profile/header/EntityTabs.tsx | 
5 +++-- datahub-web-react/src/app/entity/shared/types.ts | 1 + 3 files changed, 5 insertions(+), 2 deletions(-) diff --git a/datahub-web-react/src/app/entity/shared/containers/profile/EntityProfile.tsx b/datahub-web-react/src/app/entity/shared/containers/profile/EntityProfile.tsx index 74c127cb05dd9c..d7b7a4da804ef4 100644 --- a/datahub-web-react/src/app/entity/shared/containers/profile/EntityProfile.tsx +++ b/datahub-web-react/src/app/entity/shared/containers/profile/EntityProfile.tsx @@ -238,6 +238,7 @@ export const EntityProfile = ({ visible: () => true, enabled: () => true, }, + getDynamicName: () => '', })) || []; const visibleTabs = [...sortedTabs, ...autoRenderTabs].filter((tab) => diff --git a/datahub-web-react/src/app/entity/shared/containers/profile/header/EntityTabs.tsx b/datahub-web-react/src/app/entity/shared/containers/profile/header/EntityTabs.tsx index ea5c263ef7abc7..096f1db617d92a 100644 --- a/datahub-web-react/src/app/entity/shared/containers/profile/header/EntityTabs.tsx +++ b/datahub-web-react/src/app/entity/shared/containers/profile/header/EntityTabs.tsx @@ -44,10 +44,11 @@ export const EntityTabs = ({ tabs, selectedTab }: Props) => { onTabClick={(tab: string) => routeToTab({ tabName: tab })} > {tabs.map((tab) => { + const tabName = (tab.getDynamicName && tab.getDynamicName(entityData, baseEntity)) || tab.name; if (!tab.display?.enabled(entityData, baseEntity)) { - return ; + return ; } - return ; + return ; })} ); diff --git a/datahub-web-react/src/app/entity/shared/types.ts b/datahub-web-react/src/app/entity/shared/types.ts index 6596711d4e82a6..ae8ab747f7cb6c 100644 --- a/datahub-web-react/src/app/entity/shared/types.ts +++ b/datahub-web-react/src/app/entity/shared/types.ts @@ -50,6 +50,7 @@ export type EntityTab = { }; properties?: any; id?: string; + getDynamicName?: (GenericEntityProperties, T) => string; }; export type EntitySidebarSection = { From 7b0a8f422b02c47ffb4fe2ddd5f61c7230de0c03 Mon Sep 17 00:00:00 2001 From: Harshal Sheth Date: Fri, 1 Dec 2023 14:23:11 -0500 Subject: [PATCH 12/23] docs: add setup instructions for mac dependencies (#9346) Co-authored-by: Hyejin Yoon <0327jane@gmail.com> --- docs/developers.md | 140 ++++++++++++++++++++++++++++----------------- 1 file changed, 89 insertions(+), 51 deletions(-) diff --git a/docs/developers.md b/docs/developers.md index 52fd7d356a44c2..c3c3a59283e662 100644 --- a/docs/developers.md +++ b/docs/developers.md @@ -4,33 +4,53 @@ title: "Local Development" # DataHub Developer's Guide -## Pre-requirements - - [Java 11 SDK](https://openjdk.org/projects/jdk/11/) - - [Python 3.10] (https://www.python.org/downloads/release/python-3100/) - - [Docker](https://www.docker.com/) - - [Docker Compose](https://docs.docker.com/compose/) - - Docker engine with at least 8GB of memory to run tests. +## Requirements - :::note +- Both [Java 11 JDK](https://openjdk.org/projects/jdk/11/) and [Java 8 JDK](https://openjdk.java.net/projects/jdk8/) +- [Python 3.10](https://www.python.org/downloads/release/python-3100/) +- [Docker](https://www.docker.com/) +- [Docker Compose](https://docs.docker.com/compose/) +- Docker engine with at least 8GB of memory to run tests. - Do not try to use a JDK newer than JDK 11. The build process does not work with newer JDKs currently. +:::caution - ::: +Do not try to use a JDK newer than JDK 11. The build process does not currently work with newer JDKs versions. + +::: + +On macOS, these can be installed using [Homebrew](https://brew.sh/). 
+ +```shell +# Install Java 8 and 11 +brew tap homebrew/cask-versions +brew install java11 +brew install --cask zulu8 + +# Install Python +brew install python@3.10 # you may need to add this to your PATH +# alternatively, you can use pyenv to manage your python versions + +# Install docker and docker compose +brew install --cask docker +``` ## Building the Project Fork and clone the repository if haven't done so already -``` + +```shell git clone https://github.com/{username}/datahub.git ``` Change into the repository's root directory -``` + +```shell cd datahub ``` Use [gradle wrapper](https://docs.gradle.org/current/userguide/gradle_wrapper.html) to build the project -``` + +```shell ./gradlew build ``` @@ -38,29 +58,37 @@ Note that the above will also run run tests and a number of validations which ma We suggest partially compiling DataHub according to your needs: - - Build Datahub's backend GMS (Generalized metadata service): -``` -./gradlew :metadata-service:war:build -``` - - Build Datahub's frontend: -``` -./gradlew :datahub-frontend:dist -x yarnTest -x yarnLint -``` - - Build DataHub's command line tool: -``` -./gradlew :metadata-ingestion:installDev -``` - - Build DataHub's documentation: -``` -./gradlew :docs-website:yarnLintFix :docs-website:build -x :metadata-ingestion:runPreFlightScript -# To preview the documentation -./gradlew :docs-website:serve -``` +- Build Datahub's backend GMS (Generalized metadata service): + + ``` + ./gradlew :metadata-service:war:build + ``` + +- Build Datahub's frontend: + + ``` + ./gradlew :datahub-frontend:dist -x yarnTest -x yarnLint + ``` + +- Build DataHub's command line tool: -## Deploying local versions + ``` + ./gradlew :metadata-ingestion:installDev + ``` + +- Build DataHub's documentation: + + ``` + ./gradlew :docs-website:yarnLintFix :docs-website:build -x :metadata-ingestion:runPreFlightScript + # To preview the documentation + ./gradlew :docs-website:serve + ``` + +## Deploying Local Versions Run just once to have the local `datahub` cli tool installed in your $PATH -``` + +```shell cd smoke-test/ python3 -m venv venv source venv/bin/activate @@ -70,34 +98,40 @@ cd ../ ``` Once you have compiled & packaged the project or appropriate module you can deploy the entire system via docker-compose by running: -``` + +```shell ./gradlew quickstart ``` Replace whatever container you want in the existing deployment. I.e, replacing datahub's backend (GMS): -``` + +```shell (cd docker && COMPOSE_DOCKER_CLI_BUILD=1 DOCKER_BUILDKIT=1 docker-compose -p datahub -f docker-compose-without-neo4j.yml -f docker-compose-without-neo4j.override.yml -f docker-compose.dev.yml up -d --no-deps --force-recreate --build datahub-gms) ``` Running the local version of the frontend -``` + +```shell (cd docker && COMPOSE_DOCKER_CLI_BUILD=1 DOCKER_BUILDKIT=1 docker-compose -p datahub -f docker-compose-without-neo4j.yml -f docker-compose-without-neo4j.override.yml -f docker-compose.dev.yml up -d --no-deps --force-recreate --build datahub-frontend-react) ``` + ## IDE Support -The recommended IDE for DataHub development is [IntelliJ IDEA](https://www.jetbrains.com/idea/). -You can run the following command to generate or update the IntelliJ project file -``` + +The recommended IDE for DataHub development is [IntelliJ IDEA](https://www.jetbrains.com/idea/). +You can run the following command to generate or update the IntelliJ project file. + +```shell ./gradlew idea ``` + Open `datahub.ipr` in IntelliJ to start developing! 
For consistency please import and auto format the code using [LinkedIn IntelliJ Java style](../gradle/idea/LinkedIn%20Style.xml). - ## Windows Compatibility -For optimal performance and compatibility, we strongly recommend building on a Mac or Linux system. +For optimal performance and compatibility, we strongly recommend building on a Mac or Linux system. Please note that we do not actively support Windows in a non-virtualized environment. If you must use Windows, one workaround is to build within a virtualized environment, such as a VM(Virtual Machine) or [WSL(Windows Subsystem for Linux)](https://learn.microsoft.com/en-us/windows/wsl). @@ -105,37 +139,41 @@ This approach can help ensure that your build environment remains isolated and s ## Common Build Issues -### Getting `Unsupported class file major version 57` +#### Getting `Unsupported class file major version 57` You're probably using a Java version that's too new for gradle. Run the following command to check your Java version -``` + +```shell java --version ``` + While it may be possible to build and run DataHub using newer versions of Java, we currently only support [Java 11](https://openjdk.org/projects/jdk/11/) (aka Java 11). -### Getting `cannot find symbol` error for `javax.annotation.Generated` +#### Getting `cannot find symbol` error for `javax.annotation.Generated` Similar to the previous issue, please use Java 1.8 to build the project. You can install multiple version of Java on a single machine and switch between them using the `JAVA_HOME` environment variable. See [this document](https://docs.oracle.com/cd/E21454_01/html/821-2531/inst_jdk_javahome_t.html) for more details. -### `:metadata-models:generateDataTemplate` task fails with `java.nio.file.InvalidPathException: Illegal char <:> at index XX` or `Caused by: java.lang.IllegalArgumentException: 'other' has different root` error +#### `:metadata-models:generateDataTemplate` task fails with `java.nio.file.InvalidPathException: Illegal char <:> at index XX` or `Caused by: java.lang.IllegalArgumentException: 'other' has different root` error -This is a [known issue](https://github.com/linkedin/rest.li/issues/287) when building the project on Windows due a bug in the Pegasus plugin. Please refer to [Windows Compatibility](/docs/developers.md#windows-compatibility). +This is a [known issue](https://github.com/linkedin/rest.li/issues/287) when building the project on Windows due a bug in the Pegasus plugin. Please refer to [Windows Compatibility](/docs/developers.md#windows-compatibility). -### Various errors related to `generateDataTemplate` or other `generate` tasks +#### Various errors related to `generateDataTemplate` or other `generate` tasks -As we generate quite a few files from the models, it is possible that old generated files may conflict with new model changes. When this happens, a simple `./gradlew clean` should reosolve the issue. +As we generate quite a few files from the models, it is possible that old generated files may conflict with new model changes. When this happens, a simple `./gradlew clean` should reosolve the issue. -### `Execution failed for task ':metadata-service:restli-servlet-impl:checkRestModel'` +#### `Execution failed for task ':metadata-service:restli-servlet-impl:checkRestModel'` This generally means that an [incompatible change](https://linkedin.github.io/rest.li/modeling/compatibility_check) was introduced to the rest.li API in GMS. 
You'll need to rebuild the snapshots/IDL by running the following command once -``` + +```shell ./gradlew :metadata-service:restli-servlet-impl:build -Prest.model.compatibility=ignore ``` -### `java.io.IOException: No space left on device` +#### `java.io.IOException: No space left on device` This means you're running out of space on your disk to build. Please free up some space or try a different disk. -### `Build failed` for task `./gradlew :datahub-frontend:dist -x yarnTest -x yarnLint` +#### `Build failed` for task `./gradlew :datahub-frontend:dist -x yarnTest -x yarnLint` + This could mean that you need to update your [Yarn](https://yarnpkg.com/getting-started/install) version From f3abfd175e1c142750686b3c8f7b08acadd83a4d Mon Sep 17 00:00:00 2001 From: John Joyce Date: Fri, 1 Dec 2023 13:21:28 -0800 Subject: [PATCH 13/23] feat(ui): Add caching to search, entity profile for better UX (#9362) --- datahub-web-react/src/Mocks.tsx | 149 +++++++++++++++--- .../styled/search/EmbeddedListSearch.tsx | 1 + .../search/EmbeddedListSearchResults.tsx | 11 +- .../containers/profile/header/EntityTabs.tsx | 1 + .../profile/useGetDataForProfile.ts | 1 + ...rateUseSearchResultsViaRelationshipHook.ts | 1 + .../src/app/search/SearchPage.tsx | 2 + .../src/app/search/SearchResultList.tsx | 4 +- .../src/app/search/SearchResults.tsx | 11 +- .../search/SearchResultsLoadingSection.tsx | 33 ++++ .../app/search/__tests__/SearchPage.test.tsx | 95 ++--------- .../src/app/search/filters/BasicFilters.tsx | 4 + .../filters/BasicFiltersLoadingSection.tsx | 27 ++++ .../src/app/search/filters/SearchFilters.tsx | 20 ++- 14 files changed, 241 insertions(+), 119 deletions(-) create mode 100644 datahub-web-react/src/app/search/SearchResultsLoadingSection.tsx create mode 100644 datahub-web-react/src/app/search/filters/BasicFiltersLoadingSection.tsx diff --git a/datahub-web-react/src/Mocks.tsx b/datahub-web-react/src/Mocks.tsx index a2e14308e8cee2..ada9a06ab5b954 100644 --- a/datahub-web-react/src/Mocks.tsx +++ b/datahub-web-react/src/Mocks.tsx @@ -41,10 +41,12 @@ import { FetchedEntity } from './app/lineage/types'; import { DEFAULT_APP_CONFIG } from './appConfigContext'; export const user1 = { + __typename: 'CorpUser', username: 'sdas', urn: 'urn:li:corpuser:1', type: EntityType.CorpUser, info: { + __typename: 'CorpUserInfo', email: 'sdas@domain.com', active: true, displayName: 'sdas', @@ -53,18 +55,19 @@ export const user1 = { lastName: 'Das', fullName: 'Shirshanka Das', }, - editableInfo: { - pictureLink: 'https://crunchconf.com/img/2019/speakers/1559291783-ShirshankaDas.png', - }, globalTags: { + __typename: 'GlobalTags', tags: [ { + __typename: 'TagAssociation', tag: { + __typename: 'Tag', type: EntityType.Tag, urn: 'urn:li:tag:abc-sample-tag', name: 'abc-sample-tag', description: 'sample tag', properties: { + __typename: 'TagProperties', name: 'abc-sample-tag', description: 'sample tag', colorHex: 'sample tag color', @@ -74,14 +77,23 @@ export const user1 = { }, ], }, - settings: { appearance: { showSimplifiedHomepage: false }, views: { defaultView: null } }, + settings: { + __typename: 'CorpUserSettings', + appearance: { __typename: 'CorpUserAppearanceSettings', showSimplifiedHomepage: false }, + views: { __typename: 'CorpUserViewSettings', defaultView: null }, + }, + editableInfo: null, + properties: null, + editableProperties: null, }; const user2 = { + __typename: 'CorpUser', username: 'john', urn: 'urn:li:corpuser:3', type: EntityType.CorpUser, - info: { + properties: { + __typename: 'CorpUserInfo', email: 
'john@domain.com', active: true, displayName: 'john', @@ -90,25 +102,41 @@ const user2 = { lastName: 'Joyce', fullName: 'John Joyce', }, - editableInfo: { - pictureLink: null, - }, editableProperties: { displayName: 'Test', title: 'test', pictureLink: null, teams: [], skills: [], + __typename: 'CorpUserEditableProperties', + email: 'john@domain.com', + }, + groups: { + __typename: 'EntityRelationshipsResult', + relationships: [ + { + __typename: 'EntityRelationship', + entity: { + __typename: 'CorpGroup', + urn: 'urn:li:corpgroup:group1', + name: 'group1', + properties: null, + }, + }, + ], }, globalTags: { + __typename: 'GlobalTags', tags: [ { + __typename: 'TagAssociation', tag: { type: EntityType.Tag, urn: 'urn:li:tag:abc-sample-tag', name: 'abc-sample-tag', description: 'sample tag', properties: { + __typename: 'TagProperties', name: 'abc-sample-tag', description: 'sample tag', colorHex: 'sample tag color', @@ -118,7 +146,13 @@ const user2 = { }, ], }, - settings: { appearance: { showSimplifiedHomepage: false }, views: { defaultView: null } }, + settings: { + __typename: 'CorpUserSettings', + appearance: { __typename: 'CorpUserAppearanceSettings', showSimplifiedHomepage: false }, + views: { __typename: 'CorpUserViewSettings', defaultView: null }, + }, + editableInfo: null, + info: null, }; export const dataPlatform = { @@ -149,6 +183,7 @@ export const dataPlatformInstance = { }; export const dataset1 = { + __typename: 'Dataset', urn: 'urn:li:dataset:1', type: EntityType.Dataset, platform: { @@ -260,6 +295,7 @@ export const dataset1 = { }; export const dataset2 = { + __typename: 'Dataset', urn: 'urn:li:dataset:2', type: EntityType.Dataset, platform: { @@ -358,17 +394,23 @@ export const dataset3 = { urn: 'urn:li:dataset:3', type: EntityType.Dataset, platform: { + __typename: 'DataPlatform', urn: 'urn:li:dataPlatform:kafka', name: 'Kafka', + displayName: 'Kafka', info: { + __typename: 'DataPlatformInfo', displayName: 'Kafka', type: PlatformType.MessageBroker, datasetNameDelimiter: '.', logoUrl: '', }, type: EntityType.DataPlatform, + lastIngested: null, + properties: null, }, privileges: { + __typename: 'EntityPrivileges', canEditLineage: false, canEditEmbed: false, canEditQueries: false, @@ -381,54 +423,78 @@ export const dataset3 = { origin: 'PROD', uri: 'www.google.com', properties: { + __typename: 'DatasetProperties', name: 'Yet Another Dataset', + qualifiedName: 'Yet Another Dataset', description: 'This and here we have yet another Dataset (YAN). 
Are there more?', origin: 'PROD', - customProperties: [{ key: 'propertyAKey', value: 'propertyAValue', associatedUrn: 'urn:li:dataset:3' }], + customProperties: [ + { + __typename: 'CustomPropertiesEntry', + key: 'propertyAKey', + value: 'propertyAValue', + associatedUrn: 'urn:li:dataset:3', + }, + ], externalUrl: 'https://data.hub', }, parentContainers: { + __typename: 'ParentContainersResult', count: 0, containers: [], }, editableProperties: null, created: { + __typename: 'AuditStamp', time: 0, + actor: null, }, lastModified: { + __typename: 'AuditStamp', time: 0, + actor: null, }, ownership: { + __typename: 'Ownership', owners: [ { + __typename: 'Owner', owner: { ...user1, }, type: 'DATAOWNER', associatedUrn: 'urn:li:dataset:3', + ownershipType: null, }, { + __typename: 'Owner', owner: { ...user2, }, type: 'DELEGATE', associatedUrn: 'urn:li:dataset:3', + ownershipType: null, }, ], lastModified: { + __typename: 'AuditStamp', time: 0, + actor: null, }, }, globalTags: { __typename: 'GlobalTags', tags: [ { + __typename: 'TagAssociation', tag: { + __typename: 'Tag', type: EntityType.Tag, urn: 'urn:li:tag:abc-sample-tag', name: 'abc-sample-tag', description: 'sample tag', properties: { + __typename: 'TagProperties', name: 'abc-sample-tag', description: 'sample tag', colorHex: 'sample tag color', @@ -439,14 +505,18 @@ export const dataset3 = { ], }, glossaryTerms: { + __typename: 'GlossaryTerms', terms: [ { + __typename: 'GlossaryTermAssociation', term: { + __typename: 'GlossaryTerm', type: EntityType.GlossaryTerm, urn: 'urn:li:glossaryTerm:sample-glossary-term', name: 'sample-glossary-term', hierarchicalName: 'example.sample-glossary-term', properties: { + __typename: 'GlossaryTermProperties', name: 'sample-glossary-term', description: 'sample definition', definition: 'sample definition', @@ -463,13 +533,21 @@ export const dataset3 = { incoming: null, outgoing: null, institutionalMemory: { + __typename: 'InstitutionalMemory', elements: [ { + __typename: 'InstitutionalMemoryMetadata', url: 'https://www.google.com', - author: { urn: 'urn:li:corpuser:datahub', username: 'datahub', type: EntityType.CorpUser }, + author: { + __typename: 'CorpUser', + urn: 'urn:li:corpuser:datahub', + username: 'datahub', + type: EntityType.CorpUser, + }, description: 'This only points to Google', label: 'This only points to Google', created: { + __typename: 'AuditStamp', actor: 'urn:li:corpuser:1', time: 1612396473001, }, @@ -482,12 +560,14 @@ export const dataset3 = { operations: null, datasetProfiles: [ { + __typename: 'DatasetProfile', rowCount: 10, columnCount: 5, sizeInBytes: 10000, timestampMillis: 0, fieldProfiles: [ { + __typename: 'DatasetFieldProfile', fieldPath: 'testColumn', uniqueCount: 1, uniqueProportion: 0.129, @@ -507,6 +587,7 @@ export const dataset3 = { viewProperties: null, autoRenderAspects: [ { + __typename: 'AutoRenderAspect', aspectName: 'autoRenderAspect', payload: '{ "values": [{ "autoField1": "autoValue1", "autoField2": "autoValue2" }] }', renderSpec: { @@ -529,7 +610,11 @@ export const dataset3 = { siblings: null, statsSummary: null, embed: null, - browsePathV2: { path: [{ name: 'test', entity: null }], __typename: 'BrowsePathV2' }, + browsePathV2: { __typename: 'BrowsePathV2', path: [{ name: 'test', entity: null }] }, + access: null, + dataProduct: null, + lastProfile: null, + lastOperation: null, } as Dataset; export const dataset3WithSchema = { @@ -1839,7 +1924,6 @@ export const mocks = [ browse: { entities: [ { - __typename: 'Dataset', ...dataset1, }, ], @@ -1986,7 +2070,6 @@ export 
const mocks = [ searchResults: [ { entity: { - __typename: 'Dataset', ...dataset1, }, matchedFields: [ @@ -1999,7 +2082,6 @@ export const mocks = [ }, { entity: { - __typename: 'Dataset', ...dataset2, }, }, @@ -2075,6 +2157,7 @@ export const mocks = [ total: 1, searchResults: [ { + __typename: 'SearchResult', entity: { __typename: 'Dataset', ...dataset3, @@ -2248,6 +2331,7 @@ export const mocks = [ total: 1, searchResults: [ { + __typename: 'SearchResult', entity: { __typename: 'Dataset', ...dataset3, @@ -2259,10 +2343,12 @@ export const mocks = [ suggestions: [], facets: [ { + __typename: 'FacetMetadata', field: 'origin', displayName: 'origin', aggregations: [ { + __typename: 'AggregationMetadata', value: 'PROD', count: 3, entity: null, @@ -2270,6 +2356,7 @@ export const mocks = [ ], }, { + __typename: 'FacetMetadata', field: '_entityType', displayName: 'Type', aggregations: [ @@ -2278,12 +2365,13 @@ export const mocks = [ ], }, { + __typename: 'FacetMetadata', field: 'platform', displayName: 'platform', aggregations: [ - { value: 'hdfs', count: 1, entity: null }, - { value: 'mysql', count: 1, entity: null }, - { value: 'kafka', count: 1, entity: null }, + { value: 'hdfs', count: 1, entity: null, __typename: 'AggregationMetadata' }, + { value: 'mysql', count: 1, entity: null, __typename: 'AggregationMetadata' }, + { value: 'kafka', count: 1, entity: null, __typename: 'AggregationMetadata' }, ], }, ], @@ -2829,6 +2917,7 @@ export const mocks = [ // ], // }, { + __typename: 'FacetMetadata', field: '_entityType', displayName: 'Type', aggregations: [ @@ -2908,6 +2997,7 @@ export const mocks = [ total: 1, searchResults: [ { + __typename: 'SearchResult', entity: { __typename: 'Dataset', ...dataset3, @@ -3205,6 +3295,7 @@ export const mocks = [ total: 1, searchResults: [ { + __typename: 'SearchResult', entity: { __typename: 'Dataset', ...dataset3, @@ -3216,10 +3307,12 @@ export const mocks = [ suggestions: [], facets: [ { + __typename: 'FacetMetadata', field: 'origin', displayName: 'origin', aggregations: [ { + __typename: 'AggregationMetadata', value: 'PROD', count: 3, entity: null, @@ -3227,6 +3320,7 @@ export const mocks = [ ], }, { + __typename: 'FacetMetadata', field: '_entityType', displayName: 'Type', aggregations: [ @@ -3235,12 +3329,13 @@ export const mocks = [ ], }, { + __typename: 'FacetMetadata', field: 'platform', displayName: 'platform', aggregations: [ - { value: 'hdfs', count: 1, entity: null }, - { value: 'mysql', count: 1, entity: null }, - { value: 'kafka', count: 1, entity: null }, + { value: 'hdfs', count: 1, entity: null, __typename: 'AggregationMetadata' }, + { value: 'mysql', count: 1, entity: null, __typename: 'AggregationMetadata' }, + { value: 'kafka', count: 1, entity: null, __typename: 'AggregationMetadata' }, ], }, ], @@ -3290,6 +3385,7 @@ export const mocks = [ total: 1, searchResults: [ { + __typename: 'SearchResult', entity: { __typename: 'Dataset', ...dataset3, @@ -3301,6 +3397,7 @@ export const mocks = [ suggestions: [], facets: [ { + __typename: 'FacetMetadata', field: 'origin', displayName: 'origin', aggregations: [ @@ -3308,10 +3405,12 @@ export const mocks = [ value: 'PROD', count: 3, entity: null, + __typename: 'AggregationMetadata', }, ], }, { + __typename: 'FacetMetadata', field: '_entityType', displayName: 'Type', aggregations: [ @@ -3320,12 +3419,13 @@ export const mocks = [ ], }, { + __typename: 'FacetMetadata', field: 'platform', displayName: 'platform', aggregations: [ - { value: 'hdfs', count: 1, entity: null }, - { value: 'mysql', count: 1, 
entity: null }, - { value: 'kafka', count: 1, entity: null }, + { value: 'hdfs', count: 1, entity: null, __typename: 'AggregationMetadata' }, + { value: 'mysql', count: 1, entity: null, __typename: 'AggregationMetadata' }, + { value: 'kafka', count: 1, entity: null, __typename: 'AggregationMetadata' }, ], }, ], @@ -3367,6 +3467,7 @@ export const mocks = [ __typename: 'AuthenticatedUser', corpUser: { ...user2 }, platformPrivileges: { + __typename: 'PlatformPrivileges', viewAnalytics: true, managePolicies: true, manageIdentities: true, diff --git a/datahub-web-react/src/app/entity/shared/components/styled/search/EmbeddedListSearch.tsx b/datahub-web-react/src/app/entity/shared/components/styled/search/EmbeddedListSearch.tsx index e27a63b98f0129..26228e8c445155 100644 --- a/datahub-web-react/src/app/entity/shared/components/styled/search/EmbeddedListSearch.tsx +++ b/datahub-web-react/src/app/entity/shared/components/styled/search/EmbeddedListSearch.tsx @@ -188,6 +188,7 @@ export const EmbeddedListSearch = ({ variables: { input: searchInput, }, + fetchPolicy: 'cache-first', }); useEffect(() => { diff --git a/datahub-web-react/src/app/entity/shared/components/styled/search/EmbeddedListSearchResults.tsx b/datahub-web-react/src/app/entity/shared/components/styled/search/EmbeddedListSearchResults.tsx index e4d43f34dcba74..1daf2a4c59b70f 100644 --- a/datahub-web-react/src/app/entity/shared/components/styled/search/EmbeddedListSearchResults.tsx +++ b/datahub-web-react/src/app/entity/shared/components/styled/search/EmbeddedListSearchResults.tsx @@ -1,14 +1,15 @@ import React from 'react'; import { Pagination, Typography } from 'antd'; +import { LoadingOutlined } from '@ant-design/icons'; import styled from 'styled-components'; import { FacetFilterInput, FacetMetadata, SearchResults as SearchResultType } from '../../../../../../types.generated'; import { SearchCfg } from '../../../../../../conf'; -import { ReactComponent as LoadingSvg } from '../../../../../../images/datahub-logo-color-loading_pendulum.svg'; import { EntityAndType } from '../../../types'; import { UnionType } from '../../../../../search/utils/constants'; import { SearchFiltersSection } from '../../../../../search/SearchFiltersSection'; import { EntitySearchResults, EntityActionProps } from './EntitySearchResults'; import MatchingViewsLabel from './MatchingViewsLabel'; +import { ANTD_GRAY } from '../../../constants'; const SearchBody = styled.div` height: 100%; @@ -59,6 +60,12 @@ const LoadingContainer = styled.div` flex: 1; `; +const StyledLoading = styled(LoadingOutlined)` + font-size: 36px; + color: ${ANTD_GRAY[7]}; + padding-bottom: 18px; +]`; + interface Props { page: number; searchResponse?: SearchResultType | null; @@ -121,7 +128,7 @@ export const EmbeddedListSearchResults = ({ {loading && ( - + )} {!loading && ( diff --git a/datahub-web-react/src/app/entity/shared/containers/profile/header/EntityTabs.tsx b/datahub-web-react/src/app/entity/shared/containers/profile/header/EntityTabs.tsx index 096f1db617d92a..58693eca8af0e8 100644 --- a/datahub-web-react/src/app/entity/shared/containers/profile/header/EntityTabs.tsx +++ b/datahub-web-react/src/app/entity/shared/containers/profile/header/EntityTabs.tsx @@ -39,6 +39,7 @@ export const EntityTabs = ({ tabs, selectedTab }: Props) => { return ( routeToTab({ tabName: tab })} diff --git a/datahub-web-react/src/app/entity/shared/containers/profile/useGetDataForProfile.ts b/datahub-web-react/src/app/entity/shared/containers/profile/useGetDataForProfile.ts index 
5a7d4f24dfd2a8..ae87eeb1a84507 100644 --- a/datahub-web-react/src/app/entity/shared/containers/profile/useGetDataForProfile.ts +++ b/datahub-web-react/src/app/entity/shared/containers/profile/useGetDataForProfile.ts @@ -32,6 +32,7 @@ export default function useGetDataForProfile({ urn, entityType, useEntityQuer refetch, } = useEntityQuery({ variables: { urn }, + fetchPolicy: 'cache-first', }); const dataPossiblyCombinedWithSiblings = isHideSiblingMode diff --git a/datahub-web-react/src/app/entity/shared/tabs/Lineage/generateUseSearchResultsViaRelationshipHook.ts b/datahub-web-react/src/app/entity/shared/tabs/Lineage/generateUseSearchResultsViaRelationshipHook.ts index f3b904956b224c..e26aa01c385e81 100644 --- a/datahub-web-react/src/app/entity/shared/tabs/Lineage/generateUseSearchResultsViaRelationshipHook.ts +++ b/datahub-web-react/src/app/entity/shared/tabs/Lineage/generateUseSearchResultsViaRelationshipHook.ts @@ -45,6 +45,7 @@ export default function generateUseSearchResultsViaRelationshipHook({ variables: { input: inputFields, }, + fetchPolicy: 'cache-first', skip: !filtersExist(filters, orFilters), // If you don't include any filters, we shound't return anything :). Might as well skip! }); diff --git a/datahub-web-react/src/app/search/SearchPage.tsx b/datahub-web-react/src/app/search/SearchPage.tsx index 6387f0ef8c05ec..541355a3e2cb47 100644 --- a/datahub-web-react/src/app/search/SearchPage.tsx +++ b/datahub-web-react/src/app/search/SearchPage.tsx @@ -62,6 +62,7 @@ export const SearchPage = () => { searchFlags: { getSuggestions: true }, }, }, + fetchPolicy: 'cache-and-network', }); const total = data?.searchAcrossEntities?.total || 0; @@ -217,6 +218,7 @@ export const SearchPage = () => { )} {showSearchFiltersV2 && ( ` `; type Props = { + loading: boolean; query: string; searchResults: CombinedSearchResult[]; totalResultCount: number; @@ -64,6 +65,7 @@ type Props = { }; export const SearchResultList = ({ + loading, query, searchResults, totalResultCount, @@ -104,7 +106,7 @@ export const SearchResultList = ({ id="search-result-list" dataSource={searchResults} split={false} - locale={{ emptyText: }} + locale={{ emptyText: (!loading && ) || <> }} renderItem={(item, index) => ( ` display: flex; @@ -109,6 +109,7 @@ const SearchResultListContainer = styled.div<{ v2Styles: boolean }>` `; interface Props { + loading: boolean; unionType?: UnionType; query: string; viewUrn?: string; @@ -124,7 +125,6 @@ interface Props { } | null; facets?: Array | null; selectedFilters: Array; - loading: boolean; error: any; onChangeFilters: (filters: Array) => void; onChangeUnionType: (unionType: UnionType) => void; @@ -142,6 +142,7 @@ interface Props { } export const SearchResults = ({ + loading, unionType = UnionType.AND, query, viewUrn, @@ -149,7 +150,6 @@ export const SearchResults = ({ searchResponse, facets, selectedFilters, - loading, error, onChangeUnionType, onChangeFilters, @@ -180,7 +180,6 @@ export const SearchResults = ({ return ( <> - {loading && } {!showSearchFiltersV2 && ( @@ -247,10 +246,12 @@ export const SearchResults = ({ )} {(error && ) || - (!loading && ( + (loading && !combinedSiblingSearchResults.length && ) || + (combinedSiblingSearchResults && ( {totalResults > 0 && } + + + + + + + ); +} diff --git a/datahub-web-react/src/app/search/__tests__/SearchPage.test.tsx b/datahub-web-react/src/app/search/__tests__/SearchPage.test.tsx index 0111a264d1e17f..5d921c82913acb 100644 --- a/datahub-web-react/src/app/search/__tests__/SearchPage.test.tsx +++ 
b/datahub-web-react/src/app/search/__tests__/SearchPage.test.tsx @@ -1,42 +1,23 @@ import React from 'react'; -import { act } from 'react-dom/test-utils'; -import { fireEvent, render, waitFor } from '@testing-library/react'; +import { render, waitFor } from '@testing-library/react'; +import { InMemoryCache } from '@apollo/client'; import { MockedProvider } from '@apollo/client/testing'; import { Route } from 'react-router'; - import { SearchPage } from '../SearchPage'; import TestPageContainer from '../../../utils/test-utils/TestPageContainer'; import { mocksWithSearchFlagsOff } from '../../../Mocks'; import { PageRoutes } from '../../../conf/Global'; +import possibleTypesResult from '../../../possibleTypes.generated'; -describe('SearchPage', () => { - it('renders loading', async () => { - const promise = Promise.resolve(); - const { getByText } = render( - - - } /> - - , - ); - await waitFor(() => expect(getByText('Loading...')).toBeInTheDocument()); - await act(() => promise); - }); +const cache = new InMemoryCache({ + // need to define possibleTypes to allow us to use Apollo cache with union types + possibleTypes: possibleTypesResult.possibleTypes, +}); +describe('SearchPage', () => { it('renders the selected filters as checked', async () => { const { getByTestId, queryByTestId } = render( - + @@ -56,14 +37,7 @@ describe('SearchPage', () => { it('renders the selected filters as checked using legacy URL scheme for entity (entity instead of _entityType)', async () => { const { getByTestId, queryByTestId } = render( - + @@ -83,14 +57,7 @@ describe('SearchPage', () => { it('renders multiple checked filters at once', async () => { const { getByTestId, queryByTestId } = render( - + @@ -108,44 +75,4 @@ describe('SearchPage', () => { const hdfsPlatformBox = getByTestId('facet-platform-hdfs'); expect(hdfsPlatformBox).toHaveProperty('checked', true); }); - - it('clicking a filter selects a new filter', async () => { - const promise = Promise.resolve(); - const { getByTestId, queryByTestId } = render( - - - } /> - - , - ); - - await waitFor(() => expect(queryByTestId('facet-_entityType-DATASET')).toBeInTheDocument()); - - const datasetEntityBox = getByTestId('facet-_entityType-DATASET'); - expect(datasetEntityBox).toHaveProperty('checked', true); - - const chartEntityBox = getByTestId('facet-_entityType-CHART'); - expect(chartEntityBox).toHaveProperty('checked', false); - act(() => { - fireEvent.click(chartEntityBox); - }); - - await waitFor(() => expect(queryByTestId('facet-_entityType-DATASET')).toBeInTheDocument()); - - const datasetEntityBox2 = getByTestId('facet-_entityType-DATASET'); - expect(datasetEntityBox2).toHaveProperty('checked', true); - - const chartEntityBox2 = getByTestId('facet-_entityType-CHART'); - expect(chartEntityBox2).toHaveProperty('checked', true); - await act(() => promise); - }); }); diff --git a/datahub-web-react/src/app/search/filters/BasicFilters.tsx b/datahub-web-react/src/app/search/filters/BasicFilters.tsx index e8f56e5c2cd5e4..84750387853bb1 100644 --- a/datahub-web-react/src/app/search/filters/BasicFilters.tsx +++ b/datahub-web-react/src/app/search/filters/BasicFilters.tsx @@ -24,6 +24,7 @@ import { } from '../../onboarding/config/SearchOnboardingConfig'; import { useFilterRendererRegistry } from './render/useFilterRenderer'; import { FilterScenarioType } from './render/types'; +import BasicFiltersLoadingSection from './BasicFiltersLoadingSection'; const NUM_VISIBLE_FILTER_DROPDOWNS = 5; @@ -56,6 +57,7 @@ const FILTERS_TO_REMOVE = [ ]; interface Props { + 
loading: boolean; availableFilters: FacetMetadata[] | null; activeFilters: FacetFilterInput[]; onChangeFilters: (newFilters: FacetFilterInput[]) => void; @@ -64,6 +66,7 @@ interface Props { } export default function BasicFilters({ + loading, availableFilters, activeFilters, onChangeFilters, @@ -88,6 +91,7 @@ export default function BasicFilters({ + {loading && !visibleFilters?.length && } {visibleFilters?.map((filter) => { return filterRendererRegistry.hasRenderer(filter.field) ? ( filterRendererRegistry.render(filter.field, { diff --git a/datahub-web-react/src/app/search/filters/BasicFiltersLoadingSection.tsx b/datahub-web-react/src/app/search/filters/BasicFiltersLoadingSection.tsx new file mode 100644 index 00000000000000..f82a66d4f0c6d5 --- /dev/null +++ b/datahub-web-react/src/app/search/filters/BasicFiltersLoadingSection.tsx @@ -0,0 +1,27 @@ +import * as React from 'react'; +import { Skeleton } from 'antd'; +import styled from 'styled-components'; + +const Container = styled.div` + display: flex; + align-items: center; +`; + +const CardSkeleton = styled(Skeleton.Input)` + && { + padding: 2px 12px 2px 0px; + height: 32px; + border-radius: 8px; + } +`; + +export default function BasicFiltersLoadingSection() { + return ( + + + + + + + ); +} diff --git a/datahub-web-react/src/app/search/filters/SearchFilters.tsx b/datahub-web-react/src/app/search/filters/SearchFilters.tsx index 97e71ae701aace..bcc987159e0e6e 100644 --- a/datahub-web-react/src/app/search/filters/SearchFilters.tsx +++ b/datahub-web-react/src/app/search/filters/SearchFilters.tsx @@ -1,4 +1,4 @@ -import React from 'react'; +import React, { useEffect, useState } from 'react'; import styled from 'styled-components'; import { FacetFilterInput, FacetMetadata } from '../../../types.generated'; import { ANTD_GRAY } from '../../entity/shared/constants'; @@ -13,6 +13,7 @@ const SearchFiltersWrapper = styled.div<{ removePadding: boolean }>` `; interface Props { + loading: boolean; mode: FilterMode; availableFilters: FacetMetadata[]; activeFilters: FacetFilterInput[]; @@ -24,6 +25,7 @@ interface Props { } export default function SearchFilters({ + loading, mode, availableFilters, activeFilters, @@ -33,6 +35,17 @@ export default function SearchFilters({ onChangeUnionType, onChangeMode, }: Props) { + const [finalAvailableFilters, setFinalAvailableFilters] = useState(availableFilters); + + /** + * Only update the active filters if we are done loading. Prevents jitter! 
+ */ + useEffect(() => { + if (!loading && finalAvailableFilters !== availableFilters) { + setFinalAvailableFilters(availableFilters); + } + }, [availableFilters, loading, finalAvailableFilters]); + const isShowingBasicFilters = mode === FilterModes.BASIC; return ( {isShowingBasicFilters && ( Date: Fri, 1 Dec 2023 13:21:54 -0800 Subject: [PATCH 14/23] =?UTF-8?q?refactor(ui):=20Remove=20primary=20color?= =?UTF-8?q?=20for=20sort=20selector=20+=20add=20t=E2=80=A6=20(#9363)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../src/app/search/context/constants.ts | 8 ++-- .../app/search/sorting/SearchSortSelect.tsx | 42 ++++++++++--------- 2 files changed, 27 insertions(+), 23 deletions(-) diff --git a/datahub-web-react/src/app/search/context/constants.ts b/datahub-web-react/src/app/search/context/constants.ts index 5f841b8536e196..96e5d7c7872031 100644 --- a/datahub-web-react/src/app/search/context/constants.ts +++ b/datahub-web-react/src/app/search/context/constants.ts @@ -7,19 +7,19 @@ export const LAST_OPERATION_TIME_FIELD = 'lastOperationTime'; export const DEFAULT_SORT_OPTION = RELEVANCE; export const SORT_OPTIONS = { - [RELEVANCE]: { label: 'Relevance', field: RELEVANCE, sortOrder: SortOrder.Descending }, + [RELEVANCE]: { label: 'Relevance (Default)', field: RELEVANCE, sortOrder: SortOrder.Descending }, [`${ENTITY_NAME_FIELD}_${SortOrder.Ascending}`]: { - label: 'A to Z', + label: 'Name A to Z', field: ENTITY_NAME_FIELD, sortOrder: SortOrder.Ascending, }, [`${ENTITY_NAME_FIELD}_${SortOrder.Descending}`]: { - label: 'Z to A', + label: 'Name Z to A', field: ENTITY_NAME_FIELD, sortOrder: SortOrder.Descending, }, [`${LAST_OPERATION_TIME_FIELD}_${SortOrder.Descending}`]: { - label: 'Last Modified in Platform', + label: 'Last Modified In Source', field: LAST_OPERATION_TIME_FIELD, sortOrder: SortOrder.Descending, }, diff --git a/datahub-web-react/src/app/search/sorting/SearchSortSelect.tsx b/datahub-web-react/src/app/search/sorting/SearchSortSelect.tsx index 683292a20b5b4f..fc9486926214fb 100644 --- a/datahub-web-react/src/app/search/sorting/SearchSortSelect.tsx +++ b/datahub-web-react/src/app/search/sorting/SearchSortSelect.tsx @@ -1,8 +1,9 @@ import Icon, { CaretDownFilled } from '@ant-design/icons'; -import { Select } from 'antd'; +import { Select, Tooltip } from 'antd'; import React from 'react'; import styled from 'styled-components'; import { ReactComponent as SortIcon } from '../../../images/sort.svg'; +import { ANTD_GRAY } from '../../entity/shared/constants'; import { DEFAULT_SORT_OPTION, SORT_OPTIONS } from '../context/constants'; import { useSearchContext } from '../context/SearchContext'; @@ -13,19 +14,20 @@ const SelectWrapper = styled.span` .ant-select-selection-item { // !important is necessary because updating Select styles for antd is impossible - color: ${(props) => props.theme.styles['primary-color']} !important; + color: ${ANTD_GRAY[8]} !important; font-weight: 700; } - svg { - color: ${(props) => props.theme.styles['primary-color']}; + .ant-select-selection-placeholder { + color: ${ANTD_GRAY[8]}; + font-weight: 700; } `; const StyledIcon = styled(Icon)` - color: ${(props) => props.theme.styles['primary-color']}; + color: ${ANTD_GRAY[8]}; font-size: 16px; - margin-right: -6px; + margin-right: -8px; `; export default function SearchSortSelect() { @@ -34,18 +36,20 @@ export default function SearchSortSelect() { const options = Object.entries(SORT_OPTIONS).map(([value, option]) => ({ value, label: option.label })); return ( - - - 
setSelectedSortOption(sortOption)} + dropdownStyle={{ minWidth: 'max-content' }} + placement="bottomRight" + suffixIcon={} + /> + + ); } From 14a463b1ce1b5b60bea8496f5f4aee16b8b7aa39 Mon Sep 17 00:00:00 2001 From: John Joyce Date: Fri, 1 Dec 2023 15:53:01 -0800 Subject: [PATCH 15/23] feat(ui): Supporting subtypes for data jobs (#9361) Co-authored-by: Andrew Sikowitz --- .../datahub/graphql/GmsGraphQLEngine.java | 8 ---- .../datahub/graphql/SubTypesResolver.java | 47 ------------------- .../graphql/types/chart/ChartType.java | 3 +- .../types/chart/mappers/ChartMapper.java | 4 ++ .../types/common/mappers/SubTypesMapper.java | 22 +++++++++ .../container/mappers/ContainerMapper.java | 9 +--- .../dashboard/mappers/DashboardMapper.java | 13 ++--- .../graphql/types/datajob/DataJobType.java | 3 +- .../types/datajob/mappers/DataJobMapper.java | 4 ++ .../graphql/types/dataset/DatasetType.java | 3 +- .../types/dataset/mappers/DatasetMapper.java | 4 ++ .../src/main/resources/entity.graphql | 5 ++ .../src/app/entity/chart/ChartEntity.tsx | 1 + .../src/app/entity/dataJob/DataJobEntity.tsx | 2 + .../app/entity/dataJob/preview/Preview.tsx | 4 +- datahub-web-react/src/graphql/browse.graphql | 6 +++ .../src/graphql/dataProcess.graphql | 3 ++ .../src/graphql/fragments.graphql | 9 ++++ datahub-web-react/src/graphql/lineage.graphql | 3 ++ datahub-web-react/src/graphql/preview.graphql | 6 +++ datahub-web-react/src/graphql/scroll.graphql | 6 +++ datahub-web-react/src/graphql/search.graphql | 3 ++ .../src/main/resources/entity-registry.yml | 1 + 23 files changed, 93 insertions(+), 76 deletions(-) delete mode 100644 datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/SubTypesResolver.java create mode 100644 datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/SubTypesMapper.java diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngine.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngine.java index b0b26f073876c4..9ea8126a07ab2b 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngine.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngine.java @@ -1174,10 +1174,6 @@ private void configureDatasetResolvers(final RuntimeWiring.Builder builder) { .dataFetcher("testResults", new TestResultsResolver(entityClient)) .dataFetcher("aspects", new WeaklyTypedAspectsResolver(entityClient, entityRegistry)) .dataFetcher("exists", new EntityExistsResolver(entityService)) - .dataFetcher("subTypes", new SubTypesResolver( - this.entityClient, - "dataset", - "subTypes")) .dataFetcher("runs", new EntityRunsResolver(entityClient)) .dataFetcher("privileges", new EntityPrivilegesResolver(entityClient)) .dataFetcher("parentContainers", new ParentContainersResolver(entityClient))) @@ -1433,10 +1429,6 @@ private void configureChartResolvers(final RuntimeWiring.Builder builder) { .dataFetcher("statsSummary", new ChartStatsSummaryResolver(this.timeseriesAspectService)) .dataFetcher("privileges", new EntityPrivilegesResolver(entityClient)) .dataFetcher("exists", new EntityExistsResolver(entityService)) - .dataFetcher("subTypes", new SubTypesResolver( - this.entityClient, - "chart", - "subTypes")) ); builder.type("ChartInfo", typeWiring -> typeWiring .dataFetcher("inputs", new LoadableTypeBatchResolver<>(datasetType, diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/SubTypesResolver.java 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/SubTypesResolver.java deleted file mode 100644 index c74d84d8be3230..00000000000000 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/SubTypesResolver.java +++ /dev/null @@ -1,47 +0,0 @@ -package com.linkedin.datahub.graphql; - -import com.linkedin.common.SubTypes; -import com.linkedin.common.urn.Urn; -import com.linkedin.datahub.graphql.generated.Entity; -import com.linkedin.entity.EntityResponse; -import com.linkedin.entity.client.EntityClient; -import com.linkedin.r2.RemoteInvocationException; -import graphql.schema.DataFetcher; -import graphql.schema.DataFetchingEnvironment; -import java.net.URISyntaxException; -import java.util.Collections; -import java.util.concurrent.CompletableFuture; -import javax.annotation.Nullable; -import lombok.AllArgsConstructor; -import lombok.extern.slf4j.Slf4j; - - -@Slf4j -@AllArgsConstructor -public class SubTypesResolver implements DataFetcher> { - - EntityClient _entityClient; - String _entityType; - String _aspectName; - - @Override - @Nullable - public CompletableFuture get(DataFetchingEnvironment environment) throws Exception { - return CompletableFuture.supplyAsync(() -> { - final QueryContext context = environment.getContext(); - SubTypes subType = null; - final String urnStr = ((Entity) environment.getSource()).getUrn(); - try { - final Urn urn = Urn.createFromString(urnStr); - EntityResponse entityResponse = _entityClient.batchGetV2(urn.getEntityType(), Collections.singleton(urn), - Collections.singleton(_aspectName), context.getAuthentication()).get(urn); - if (entityResponse != null && entityResponse.getAspects().containsKey(_aspectName)) { - subType = new SubTypes(entityResponse.getAspects().get(_aspectName).getValue().data()); - } - } catch (RemoteInvocationException | URISyntaxException e) { - throw new RuntimeException("Failed to fetch aspect " + _aspectName + " for urn " + urnStr + " ", e); - } - return subType; - }); - } -} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/chart/ChartType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/chart/ChartType.java index cfec8f8a2391f6..fa0e3cd8568033 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/chart/ChartType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/chart/ChartType.java @@ -77,7 +77,8 @@ public class ChartType implements SearchableEntityType, Browsable INPUT_FIELDS_ASPECT_NAME, EMBED_ASPECT_NAME, DATA_PRODUCTS_ASPECT_NAME, - BROWSE_PATHS_V2_ASPECT_NAME + BROWSE_PATHS_V2_ASPECT_NAME, + SUB_TYPES_ASPECT_NAME ); private static final Set FACET_FIELDS = ImmutableSet.of("access", "queryType", "tool", "type"); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/chart/mappers/ChartMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/chart/mappers/ChartMapper.java index 657c9b688aed20..e0ffc57ddf5194 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/chart/mappers/ChartMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/chart/mappers/ChartMapper.java @@ -11,6 +11,7 @@ import com.linkedin.common.InstitutionalMemory; import com.linkedin.common.Ownership; import com.linkedin.common.Status; +import com.linkedin.common.SubTypes; import com.linkedin.common.urn.Urn; import com.linkedin.data.DataMap; import com.linkedin.datahub.graphql.generated.AccessLevel; @@ 
-34,6 +35,7 @@ import com.linkedin.datahub.graphql.types.common.mappers.OwnershipMapper; import com.linkedin.datahub.graphql.types.common.mappers.StatusMapper; import com.linkedin.datahub.graphql.types.common.mappers.CustomPropertiesMapper; +import com.linkedin.datahub.graphql.types.common.mappers.SubTypesMapper; import com.linkedin.datahub.graphql.types.common.mappers.util.MappingHelper; import com.linkedin.datahub.graphql.types.common.mappers.util.SystemMetadataUtils; import com.linkedin.datahub.graphql.types.domain.DomainAssociationMapper; @@ -97,6 +99,8 @@ public Chart apply(@Nonnull final EntityResponse entityResponse) { chart.setEmbed(EmbedMapper.map(new Embed(dataMap)))); mappingHelper.mapToResult(BROWSE_PATHS_V2_ASPECT_NAME, (chart, dataMap) -> chart.setBrowsePathV2(BrowsePathsV2Mapper.map(new BrowsePathsV2(dataMap)))); + mappingHelper.mapToResult(SUB_TYPES_ASPECT_NAME, (dashboard, dataMap) -> + dashboard.setSubTypes(SubTypesMapper.map(new SubTypes(dataMap)))); return mappingHelper.getResult(); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/SubTypesMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/SubTypesMapper.java new file mode 100644 index 00000000000000..9aa94eae629990 --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/SubTypesMapper.java @@ -0,0 +1,22 @@ +package com.linkedin.datahub.graphql.types.common.mappers; + +import com.linkedin.common.SubTypes; +import com.linkedin.datahub.graphql.types.mappers.ModelMapper; +import java.util.ArrayList; +import javax.annotation.Nonnull; + +public class SubTypesMapper implements ModelMapper { + + public static final SubTypesMapper INSTANCE = new SubTypesMapper(); + + public static com.linkedin.datahub.graphql.generated.SubTypes map(@Nonnull final SubTypes metadata) { + return INSTANCE.apply(metadata); + } + + @Override + public com.linkedin.datahub.graphql.generated.SubTypes apply(@Nonnull final SubTypes input) { + final com.linkedin.datahub.graphql.generated.SubTypes result = new com.linkedin.datahub.graphql.generated.SubTypes(); + result.setTypeNames(new ArrayList<>(input.getTypeNames())); + return result; + } +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/container/mappers/ContainerMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/container/mappers/ContainerMapper.java index aeaa8f4f85c144..b81259e78be3e7 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/container/mappers/ContainerMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/container/mappers/ContainerMapper.java @@ -21,6 +21,7 @@ import com.linkedin.datahub.graphql.types.common.mappers.OwnershipMapper; import com.linkedin.datahub.graphql.types.common.mappers.CustomPropertiesMapper; import com.linkedin.datahub.graphql.types.common.mappers.StatusMapper; +import com.linkedin.datahub.graphql.types.common.mappers.SubTypesMapper; import com.linkedin.datahub.graphql.types.common.mappers.util.SystemMetadataUtils; import com.linkedin.datahub.graphql.types.domain.DomainAssociationMapper; import com.linkedin.datahub.graphql.types.glossary.mappers.GlossaryTermsMapper; @@ -97,7 +98,7 @@ public static Container map(final EntityResponse entityResponse) { final EnvelopedAspect envelopedSubTypes = aspects.get(Constants.SUB_TYPES_ASPECT_NAME); if (envelopedSubTypes != null) { - 
result.setSubTypes(mapSubTypes(new SubTypes(envelopedSubTypes.getValue().data()))); + result.setSubTypes(SubTypesMapper.map(new SubTypes(envelopedSubTypes.getValue().data()))); } final EnvelopedAspect envelopedContainer = aspects.get(Constants.CONTAINER_ASPECT_NAME); @@ -150,12 +151,6 @@ private static com.linkedin.datahub.graphql.generated.ContainerEditablePropertie return editableContainerProperties; } - private static com.linkedin.datahub.graphql.generated.SubTypes mapSubTypes(final SubTypes gmsSubTypes) { - final com.linkedin.datahub.graphql.generated.SubTypes subTypes = new com.linkedin.datahub.graphql.generated.SubTypes(); - subTypes.setTypeNames(gmsSubTypes.getTypeNames()); - return subTypes; - } - private static DataPlatform mapPlatform(final DataPlatformInstance platformInstance) { // Set dummy platform to be resolved. final DataPlatform dummyPlatform = new DataPlatform(); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dashboard/mappers/DashboardMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dashboard/mappers/DashboardMapper.java index 32e4341ece4aac..432624ac4699f4 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dashboard/mappers/DashboardMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dashboard/mappers/DashboardMapper.java @@ -33,6 +33,7 @@ import com.linkedin.datahub.graphql.types.common.mappers.OwnershipMapper; import com.linkedin.datahub.graphql.types.common.mappers.StatusMapper; import com.linkedin.datahub.graphql.types.common.mappers.CustomPropertiesMapper; +import com.linkedin.datahub.graphql.types.common.mappers.SubTypesMapper; import com.linkedin.datahub.graphql.types.common.mappers.util.MappingHelper; import com.linkedin.datahub.graphql.types.common.mappers.util.SystemMetadataUtils; import com.linkedin.datahub.graphql.types.domain.DomainAssociationMapper; @@ -91,7 +92,8 @@ public Dashboard apply(@Nonnull final EntityResponse entityResponse) { dataset.setDataPlatformInstance(DataPlatformInstanceAspectMapper.map(new DataPlatformInstance(dataMap)))); mappingHelper.mapToResult(INPUT_FIELDS_ASPECT_NAME, (dashboard, dataMap) -> dashboard.setInputFields(InputFieldsMapper.map(new InputFields(dataMap), entityUrn))); - mappingHelper.mapToResult(SUB_TYPES_ASPECT_NAME, this::mapSubTypes); + mappingHelper.mapToResult(SUB_TYPES_ASPECT_NAME, (dashboard, dataMap) -> + dashboard.setSubTypes(SubTypesMapper.map(new SubTypes(dataMap)))); mappingHelper.mapToResult(EMBED_ASPECT_NAME, (dashboard, dataMap) -> dashboard.setEmbed(EmbedMapper.map(new Embed(dataMap)))); mappingHelper.mapToResult(BROWSE_PATHS_V2_ASPECT_NAME, (dashboard, dataMap) -> @@ -204,13 +206,4 @@ private void mapDomains(@Nonnull Dashboard dashboard, @Nonnull DataMap dataMap) final Domains domains = new Domains(dataMap); dashboard.setDomain(DomainAssociationMapper.map(domains, dashboard.getUrn())); } - - private void mapSubTypes(@Nonnull Dashboard dashboard, DataMap dataMap) { - SubTypes pegasusSubTypes = new SubTypes(dataMap); - if (pegasusSubTypes.hasTypeNames()) { - com.linkedin.datahub.graphql.generated.SubTypes subTypes = new com.linkedin.datahub.graphql.generated.SubTypes(); - subTypes.setTypeNames(pegasusSubTypes.getTypeNames().stream().collect(Collectors.toList())); - dashboard.setSubTypes(subTypes); - } - } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datajob/DataJobType.java 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datajob/DataJobType.java index bde79f6dce6e8c..f6f37978bb36aa 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datajob/DataJobType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datajob/DataJobType.java @@ -75,7 +75,8 @@ public class DataJobType implements SearchableEntityType, Brows DEPRECATION_ASPECT_NAME, DATA_PLATFORM_INSTANCE_ASPECT_NAME, DATA_PRODUCTS_ASPECT_NAME, - BROWSE_PATHS_V2_ASPECT_NAME + BROWSE_PATHS_V2_ASPECT_NAME, + SUB_TYPES_ASPECT_NAME ); private static final Set FACET_FIELDS = ImmutableSet.of("flow"); private final EntityClient _entityClient; diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datajob/mappers/DataJobMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datajob/mappers/DataJobMapper.java index 4845fc18763488..61802ad9cfe5c1 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datajob/mappers/DataJobMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datajob/mappers/DataJobMapper.java @@ -9,6 +9,7 @@ import com.linkedin.common.InstitutionalMemory; import com.linkedin.common.Ownership; import com.linkedin.common.Status; +import com.linkedin.common.SubTypes; import com.linkedin.common.urn.Urn; import com.linkedin.data.DataMap; import com.linkedin.datahub.graphql.generated.DataFlow; @@ -27,6 +28,7 @@ import com.linkedin.datahub.graphql.types.common.mappers.OwnershipMapper; import com.linkedin.datahub.graphql.types.common.mappers.StatusMapper; import com.linkedin.datahub.graphql.types.common.mappers.CustomPropertiesMapper; +import com.linkedin.datahub.graphql.types.common.mappers.SubTypesMapper; import com.linkedin.datahub.graphql.types.common.mappers.util.SystemMetadataUtils; import com.linkedin.datahub.graphql.types.domain.DomainAssociationMapper; import com.linkedin.datahub.graphql.types.glossary.mappers.GlossaryTermsMapper; @@ -103,6 +105,8 @@ public DataJob apply(@Nonnull final EntityResponse entityResponse) { result.setDataPlatformInstance(DataPlatformInstanceAspectMapper.map(new DataPlatformInstance(data))); } else if (BROWSE_PATHS_V2_ASPECT_NAME.equals(name)) { result.setBrowsePathV2(BrowsePathsV2Mapper.map(new BrowsePathsV2(data))); + } else if (SUB_TYPES_ASPECT_NAME.equals(name)) { + result.setSubTypes(SubTypesMapper.map(new SubTypes(data))); } }); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/DatasetType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/DatasetType.java index 0fc4399ac902d7..6f339d3985133f 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/DatasetType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/DatasetType.java @@ -86,7 +86,8 @@ public class DatasetType implements SearchableEntityType, Brows EMBED_ASPECT_NAME, DATA_PRODUCTS_ASPECT_NAME, BROWSE_PATHS_V2_ASPECT_NAME, - ACCESS_DATASET_ASPECT_NAME + ACCESS_DATASET_ASPECT_NAME, + SUB_TYPES_ASPECT_NAME ); private static final Set FACET_FIELDS = ImmutableSet.of("origin", "platform"); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetMapper.java index 4867aa1d89825f..3e39c14c29ede1 100644 --- 
a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetMapper.java @@ -11,6 +11,7 @@ import com.linkedin.common.Ownership; import com.linkedin.common.Siblings; import com.linkedin.common.Status; +import com.linkedin.common.SubTypes; import com.linkedin.common.TimeStamp; import com.linkedin.common.urn.Urn; import com.linkedin.data.DataMap; @@ -29,6 +30,7 @@ import com.linkedin.datahub.graphql.types.common.mappers.SiblingsMapper; import com.linkedin.datahub.graphql.types.common.mappers.StatusMapper; import com.linkedin.datahub.graphql.types.common.mappers.CustomPropertiesMapper; +import com.linkedin.datahub.graphql.types.common.mappers.SubTypesMapper; import com.linkedin.datahub.graphql.types.common.mappers.UpstreamLineagesMapper; import com.linkedin.datahub.graphql.types.common.mappers.util.MappingHelper; import com.linkedin.datahub.graphql.types.common.mappers.util.SystemMetadataUtils; @@ -114,6 +116,8 @@ public Dataset apply(@Nonnull final EntityResponse entityResponse) { dataset.setBrowsePathV2(BrowsePathsV2Mapper.map(new BrowsePathsV2(dataMap)))); mappingHelper.mapToResult(ACCESS_DATASET_ASPECT_NAME, ((dataset, dataMap) -> dataset.setAccess(AccessMapper.map(new Access(dataMap), entityUrn)))); + mappingHelper.mapToResult(SUB_TYPES_ASPECT_NAME, (dashboard, dataMap) -> + dashboard.setSubTypes(SubTypesMapper.map(new SubTypes(dataMap)))); return mappingHelper.getResult(); } diff --git a/datahub-graphql-core/src/main/resources/entity.graphql b/datahub-graphql-core/src/main/resources/entity.graphql index 035f756a10d557..4f3769d9088156 100644 --- a/datahub-graphql-core/src/main/resources/entity.graphql +++ b/datahub-graphql-core/src/main/resources/entity.graphql @@ -5689,6 +5689,11 @@ type DataJob implements EntityWithRelationships & Entity & BrowsableEntity { """ type: EntityType! 
+ """ + Sub Types that this entity implements + """ + subTypes: SubTypes + """ The timestamp for the last time this entity was ingested """ diff --git a/datahub-web-react/src/app/entity/chart/ChartEntity.tsx b/datahub-web-react/src/app/entity/chart/ChartEntity.tsx index fc898dec9d93af..d2d35aad7c29fd 100644 --- a/datahub-web-react/src/app/entity/chart/ChartEntity.tsx +++ b/datahub-web-react/src/app/entity/chart/ChartEntity.tsx @@ -168,6 +168,7 @@ export class ChartEntity implements Entity { return ( { { Date: Mon, 4 Dec 2023 09:50:46 +0100 Subject: [PATCH 16/23] fix(ingest/bigquery): Fix format arguments for table lineage test (#9340) (#9341) --- .../src/datahub/ingestion/source/bigquery_v2/lineage.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/metadata-ingestion/src/datahub/ingestion/source/bigquery_v2/lineage.py b/metadata-ingestion/src/datahub/ingestion/source/bigquery_v2/lineage.py index e9acf5ea860445..eddd08c92b808d 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/bigquery_v2/lineage.py +++ b/metadata-ingestion/src/datahub/ingestion/source/bigquery_v2/lineage.py @@ -894,8 +894,8 @@ def test_capability(self, project_id: str) -> None: for entry in self.audit_log_api.get_bigquery_log_entries_via_gcp_logging( gcp_logging_client, filter=BQ_FILTER_RULE_TEMPLATE_V2_LINEAGE.format( - self.start_time.strftime(BQ_DATETIME_FORMAT), - self.end_time.strftime(BQ_DATETIME_FORMAT), + start_time=self.start_time.strftime(BQ_DATETIME_FORMAT), + end_time=self.end_time.strftime(BQ_DATETIME_FORMAT), ), log_page_size=self.config.log_page_size, limit=1, From 7857944bb52ff29ee7d30d8fba21262aa4510b0a Mon Sep 17 00:00:00 2001 From: ethan-cartwright Date: Mon, 4 Dec 2023 11:32:45 -0500 Subject: [PATCH 17/23] fix(siblingsHook): add logic to account for non dbt upstreams (#9154) Co-authored-by: Ethan Cartwright --- .../hook/siblings/SiblingAssociationHook.java | 3 +-- .../siblings/SiblingAssociationHookTest.java | 22 +++++++++++++++++++ 2 files changed, 23 insertions(+), 2 deletions(-) diff --git a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hook/siblings/SiblingAssociationHook.java b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hook/siblings/SiblingAssociationHook.java index 7cbe53dee9fe4b..064f987ff1ba92 100644 --- a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hook/siblings/SiblingAssociationHook.java +++ b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hook/siblings/SiblingAssociationHook.java @@ -205,9 +205,8 @@ private void handleSourceDatasetEvent(MetadataChangeLog event, DatasetUrn source // We're assuming a data asset (eg. snowflake table) will only ever be downstream of 1 dbt model if (dbtUpstreams.size() == 1) { setSiblingsAndSoftDeleteSibling(dbtUpstreams.get(0).getDataset(), sourceUrn); - } else { + } else if (dbtUpstreams.size() > 1) { log.error("{} has an unexpected number of dbt upstreams: {}. 
Not adding any as siblings.", sourceUrn.toString(), dbtUpstreams.size()); - } } } diff --git a/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/siblings/SiblingAssociationHookTest.java b/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/siblings/SiblingAssociationHookTest.java index 6a2a05aa4b8c0d..93e98b7343cd4f 100644 --- a/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/siblings/SiblingAssociationHookTest.java +++ b/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/siblings/SiblingAssociationHookTest.java @@ -305,6 +305,28 @@ public void testInvokeWhenSourceUrnHasTwoUpstreamsOneDbt() throws Exception { Mockito.verify(_mockEntityClient, Mockito.times(2)).ingestProposal(Mockito.any(), eq(true)); } + @Test + public void testInvokeWhenSourceUrnHasTwoUpstreamsNoDbt() throws Exception { + + MetadataChangeLog event = createEvent(DATASET_ENTITY_NAME, UPSTREAM_LINEAGE_ASPECT_NAME, ChangeType.UPSERT); + final UpstreamLineage upstreamLineage = new UpstreamLineage(); + final UpstreamArray upstreamArray = new UpstreamArray(); + Upstream snowflakeUpstream1 = + createUpstream("urn:li:dataset:(urn:li:dataPlatform:snowflake,my-proj.jaffle_shop1.customers,PROD)", DatasetLineageType.TRANSFORMED); + Upstream snowflakeUpstream2 = + createUpstream("urn:li:dataset:(urn:li:dataPlatform:snowflake,my-proj.jaffle_shop2.customers,PROD)", DatasetLineageType.TRANSFORMED); + upstreamArray.add(snowflakeUpstream1); + upstreamArray.add(snowflakeUpstream2); + upstreamLineage.setUpstreams(upstreamArray); + + event.setAspect(GenericRecordUtils.serializeAspect(upstreamLineage)); + event.setEntityUrn(Urn.createFromString("urn:li:dataset:(urn:li:dataPlatform:bigquery,my-proj.jaffle_shop.customers,PROD)")); + _siblingAssociationHook.invoke(event); + + + Mockito.verify(_mockEntityClient, Mockito.times(0)).ingestProposal(Mockito.any(), eq(true)); + } + private MetadataChangeLog createEvent(String entityType, String aspectName, ChangeType changeType) { MetadataChangeLog event = new MetadataChangeLog(); event.setEntityType(entityType); From f9b24e07241bd5dc3e6d93698a90000fc08150fb Mon Sep 17 00:00:00 2001 From: purnimagarg1 <139125209+purnimagarg1@users.noreply.github.com> Date: Mon, 4 Dec 2023 22:58:41 +0530 Subject: [PATCH 18/23] feat: Support CSV ingestion through the UI (#9280) Co-authored-by: Gabe Lyons --- .../src/app/ingest/source/builder/CSVInfo.tsx | 27 ++++++++ .../ingest/source/builder/RecipeBuilder.tsx | 5 +- .../source/builder/RecipeForm/constants.ts | 8 ++- .../ingest/source/builder/RecipeForm/csv.ts | 60 ++++++++++++++++++ .../app/ingest/source/builder/constants.ts | 4 ++ .../app/ingest/source/builder/sources.json | 7 ++ .../src/app/ingest/source/conf/csv/csv.ts | 22 +++++++ .../src/app/ingest/source/conf/sources.tsx | 2 + datahub-web-react/src/images/csv-logo.png | Bin 0 -> 12029 bytes .../main/resources/boot/data_platforms.json | 10 +++ 10 files changed, 143 insertions(+), 2 deletions(-) create mode 100644 datahub-web-react/src/app/ingest/source/builder/CSVInfo.tsx create mode 100644 datahub-web-react/src/app/ingest/source/builder/RecipeForm/csv.ts create mode 100644 datahub-web-react/src/app/ingest/source/conf/csv/csv.ts create mode 100644 datahub-web-react/src/images/csv-logo.png diff --git a/datahub-web-react/src/app/ingest/source/builder/CSVInfo.tsx b/datahub-web-react/src/app/ingest/source/builder/CSVInfo.tsx new file mode 100644 index 00000000000000..87d632bb228b5f --- /dev/null +++ 
b/datahub-web-react/src/app/ingest/source/builder/CSVInfo.tsx @@ -0,0 +1,27 @@ +import React from 'react'; +import { Alert } from 'antd'; + +const CSV_FORMAT_LINK = 'https://datahubproject.io/docs/generated/ingestion/sources/csv'; + +export const CSVInfo = () => { + const link = ( + + link + + ); + + return ( + + Add the URL of your CSV file to be ingested. This will work for any web-hosted CSV file. For + example, You can create a file in google sheets following the format at this {link} and then + construct the CSV URL by publishing your google sheet in the CSV format. + + } + /> + ); +}; diff --git a/datahub-web-react/src/app/ingest/source/builder/RecipeBuilder.tsx b/datahub-web-react/src/app/ingest/source/builder/RecipeBuilder.tsx index bee9b04cee1007..db1f0fdd4dfa6b 100644 --- a/datahub-web-react/src/app/ingest/source/builder/RecipeBuilder.tsx +++ b/datahub-web-react/src/app/ingest/source/builder/RecipeBuilder.tsx @@ -7,8 +7,9 @@ import { ANTD_GRAY } from '../../../entity/shared/constants'; import { YamlEditor } from './YamlEditor'; import RecipeForm from './RecipeForm/RecipeForm'; import { SourceBuilderState, SourceConfig } from './types'; -import { LOOKER, LOOK_ML } from './constants'; +import { CSV, LOOKER, LOOK_ML } from './constants'; import { LookerWarning } from './LookerWarning'; +import { CSVInfo } from './CSVInfo'; export const ControlsContainer = styled.div` display: flex; @@ -81,6 +82,8 @@ function RecipeBuilder(props: Props) { return (
{(type === LOOKER || type === LOOK_ML) && } + {type === CSV && } + {sourceConfigs?.displayName} Recipe diff --git a/datahub-web-react/src/app/ingest/source/builder/RecipeForm/constants.ts b/datahub-web-react/src/app/ingest/source/builder/RecipeForm/constants.ts index 351876fe6b16aa..844bf50926764a 100644 --- a/datahub-web-react/src/app/ingest/source/builder/RecipeForm/constants.ts +++ b/datahub-web-react/src/app/ingest/source/builder/RecipeForm/constants.ts @@ -83,7 +83,7 @@ import { PROJECT_NAME, } from './lookml'; import { PRESTO, PRESTO_HOST_PORT, PRESTO_DATABASE, PRESTO_USERNAME, PRESTO_PASSWORD } from './presto'; -import { BIGQUERY_BETA, DBT_CLOUD, MYSQL, POWER_BI, UNITY_CATALOG, VERTICA } from '../constants'; +import { BIGQUERY_BETA, CSV, DBT_CLOUD, MYSQL, POWER_BI, UNITY_CATALOG, VERTICA } from '../constants'; import { BIGQUERY_BETA_PROJECT_ID, DATASET_ALLOW, DATASET_DENY, PROJECT_ALLOW, PROJECT_DENY } from './bigqueryBeta'; import { MYSQL_HOST_PORT, MYSQL_PASSWORD, MYSQL_USERNAME } from './mysql'; import { MSSQL, MSSQL_DATABASE, MSSQL_HOST_PORT, MSSQL_PASSWORD, MSSQL_USERNAME } from './mssql'; @@ -140,6 +140,7 @@ import { INCLUDE_VIEW_LINEAGE, INCLUDE_PROJECTIONS_LINEAGE, } from './vertica'; +import { CSV_ARRAY_DELIMITER, CSV_DELIMITER, CSV_FILE_URL, CSV_WRITE_SEMANTICS } from './csv'; export enum RecipeSections { Connection = 0, @@ -453,6 +454,11 @@ export const RECIPE_FIELDS: RecipeFields = { ], filterSectionTooltip: 'Include or exclude specific Schemas, Tables, Views and Projections from ingestion.', }, + [CSV]: { + fields: [CSV_FILE_URL], + filterFields: [], + advancedFields: [CSV_ARRAY_DELIMITER, CSV_DELIMITER, CSV_WRITE_SEMANTICS], + }, }; export const CONNECTORS_WITH_FORM = new Set(Object.keys(RECIPE_FIELDS)); diff --git a/datahub-web-react/src/app/ingest/source/builder/RecipeForm/csv.ts b/datahub-web-react/src/app/ingest/source/builder/RecipeForm/csv.ts new file mode 100644 index 00000000000000..fba4f3b9d01641 --- /dev/null +++ b/datahub-web-react/src/app/ingest/source/builder/RecipeForm/csv.ts @@ -0,0 +1,60 @@ +import { RecipeField, FieldType } from './common'; + +const validateURL = (fieldName) => { + return { + validator(_, value) { + const URLPattern = new RegExp(/^(?:http(s)?:\/\/)?[\w.-]+(?:\.[\w.-]+)+[\w\-._~:/?#[\]@!$&'()*+,;=.]+$/); + const isURLValid = URLPattern.test(value); + if (!value || isURLValid) { + return Promise.resolve(); + } + return Promise.reject(new Error(`A valid ${fieldName} is required.`)); + }, + }; +}; + +export const CSV_FILE_URL: RecipeField = { + name: 'filename', + label: 'File URL', + tooltip: 'File URL of the CSV file to ingest.', + type: FieldType.TEXT, + fieldPath: 'source.config.filename', + placeholder: 'File URL', + required: true, + rules: [() => validateURL('File URL')], +}; + +export const CSV_ARRAY_DELIMITER: RecipeField = { + name: 'array_delimiter', + label: 'Array delimiter', + tooltip: 'Delimiter to use when parsing array fields (tags, terms and owners)', + type: FieldType.TEXT, + fieldPath: 'source.config.array_delimiter', + placeholder: 'Array delimiter', + rules: null, +}; + +export const CSV_DELIMITER: RecipeField = { + name: 'delimiter', + label: 'Delimiter', + tooltip: 'Delimiter to use when parsing CSV', + type: FieldType.TEXT, + fieldPath: 'source.config.delimiter', + placeholder: 'Delimiter', + rules: null, +}; + +export const CSV_WRITE_SEMANTICS: RecipeField = { + name: 'write_semantics', + label: 'Write Semantics', + tooltip: + 'Whether the new tags, terms and owners to be added will override the existing ones 
added only by this source or not. Value for this config can be "PATCH" or "OVERRIDE"', + type: FieldType.SELECT, + options: [ + { label: 'PATCH', value: 'PATCH' }, + { label: 'OVERRIDE', value: 'OVERRIDE' }, + ], + fieldPath: 'source.config.write_semantics', + placeholder: 'Write Semantics', + rules: null, +}; diff --git a/datahub-web-react/src/app/ingest/source/builder/constants.ts b/datahub-web-react/src/app/ingest/source/builder/constants.ts index fdb094d721304b..08538729de40b5 100644 --- a/datahub-web-react/src/app/ingest/source/builder/constants.ts +++ b/datahub-web-react/src/app/ingest/source/builder/constants.ts @@ -30,6 +30,7 @@ import verticaLogo from '../../../../images/verticalogo.png'; import mlflowLogo from '../../../../images/mlflowlogo.png'; import dynamodbLogo from '../../../../images/dynamodblogo.png'; import fivetranLogo from '../../../../images/fivetranlogo.png'; +import csvLogo from '../../../../images/csv-logo.png'; export const ATHENA = 'athena'; export const ATHENA_URN = `urn:li:dataPlatform:${ATHENA}`; @@ -108,6 +109,8 @@ export const VERTICA = 'vertica'; export const VERTICA_URN = `urn:li:dataPlatform:${VERTICA}`; export const FIVETRAN = 'fivetran'; export const FIVETRAN_URN = `urn:li:dataPlatform:${FIVETRAN}`; +export const CSV = 'csv-enricher'; +export const CSV_URN = `urn:li:dataPlatform:${CSV}`; export const PLATFORM_URN_TO_LOGO = { [ATHENA_URN]: athenaLogo, @@ -142,6 +145,7 @@ export const PLATFORM_URN_TO_LOGO = { [UNITY_CATALOG_URN]: databricksLogo, [VERTICA_URN]: verticaLogo, [FIVETRAN_URN]: fivetranLogo, + [CSV_URN]: csvLogo, }; export const SOURCE_TO_PLATFORM_URN = { diff --git a/datahub-web-react/src/app/ingest/source/builder/sources.json b/datahub-web-react/src/app/ingest/source/builder/sources.json index 9619abebbd54e6..2dc2598c1a0abc 100644 --- a/datahub-web-react/src/app/ingest/source/builder/sources.json +++ b/datahub-web-react/src/app/ingest/source/builder/sources.json @@ -223,6 +223,13 @@ "docsUrl": "https://datahubproject.io/docs/generated/ingestion/sources/fivetran/", "recipe": "source:\n type: fivetran\n config:\n # Fivetran log connector destination server configurations\n fivetran_log_config:\n destination_platform: snowflake\n destination_config:\n # Coordinates\n account_id: snowflake_account_id\n warehouse: warehouse_name\n database: snowflake_db\n log_schema: fivetran_log_schema\n\n # Credentials\n username: ${SNOWFLAKE_USER}\n password: ${SNOWFLAKE_PASS}\n role: snowflake_role\n\n # Optional - filter for certain connector names instead of ingesting everything.\n # connector_patterns:\n # allow:\n # - connector_name\n\n # Optional -- This mapping is optional and only required to configure platform-instance for source\n # A mapping of Fivetran connector id to data platform instance\n # sources_to_platform_instance:\n # calendar_elected:\n # platform_instance: cloud_postgres_instance\n # env: DEV\n\n # Optional -- This mapping is optional and only required to configure platform-instance for destination.\n # A mapping of Fivetran destination id to data platform instance\n # destination_to_platform_instance:\n # calendar_elected:\n # platform_instance: cloud_postgres_instance\n # env: DEV" }, + { + "urn": "urn:li:dataPlatform:csv-enricher", + "name": "csv-enricher", + "displayName": "CSV", + "docsUrl": "https://datahubproject.io/docs/generated/ingestion/sources/csv'", + "recipe": "source: \n type: csv-enricher \n config: \n # URL of your csv file to ingest \n filename: \n array_delimiter: '|' \n delimiter: ',' \n write_semantics: PATCH" + }, { 
"urn": "urn:li:dataPlatform:custom", "name": "custom", diff --git a/datahub-web-react/src/app/ingest/source/conf/csv/csv.ts b/datahub-web-react/src/app/ingest/source/conf/csv/csv.ts new file mode 100644 index 00000000000000..e1dc22c086fb43 --- /dev/null +++ b/datahub-web-react/src/app/ingest/source/conf/csv/csv.ts @@ -0,0 +1,22 @@ +import { SourceConfig } from '../types'; +import csvLogo from '../../../../../images/csv-logo.png'; + +const placeholderRecipe = `\ +source: + type: csv-enricher + config: + filename: # URL of your csv file to ingest, e.g. https://docs.google.com/spreadsheets/d/DOCID/export?format=csv + array_delimiter: | + delimiter: , + write_semantics: PATCH +`; + +const csvConfig: SourceConfig = { + type: 'csv-enricher', + placeholderRecipe, + displayName: 'CSV', + docsUrl: 'https://datahubproject.io/docs/generated/ingestion/sources/csv', + logoUrl: csvLogo, +}; + +export default csvConfig; diff --git a/datahub-web-react/src/app/ingest/source/conf/sources.tsx b/datahub-web-react/src/app/ingest/source/conf/sources.tsx index a3cdb0a8f58433..4dbeeb5c975e9d 100644 --- a/datahub-web-react/src/app/ingest/source/conf/sources.tsx +++ b/datahub-web-react/src/app/ingest/source/conf/sources.tsx @@ -16,6 +16,7 @@ import { SourceConfig } from './types'; import hiveConfig from './hive/hive'; import oracleConfig from './oracle/oracle'; import tableauConfig from './tableau/tableau'; +import csvConfig from './csv/csv'; const baseUrl = window.location.origin; @@ -46,6 +47,7 @@ export const SOURCE_TEMPLATE_CONFIGS: Array<SourceConfig> = [ glueConfig, oracleConfig, hiveConfig, + csvConfig, { type: 'custom', placeholderRecipe: DEFAULT_PLACEHOLDER_RECIPE, diff --git a/datahub-web-react/src/images/csv-logo.png b/datahub-web-react/src/images/csv-logo.png new file mode 100644 index 0000000000000000000000000000000000000000..b5fdc189cf58ec8c62a14ff58fcc271c36bb1a4f GIT binary patch literal 12029 zcmdUVbyQUGx9=H-loXVdlt!eKln@XYT0kiYNf}@O0f!DnLPcPZZfPkMkeCq=L_$L8 z8XBcL2bg(>-@WhMx9(f-ci%sE-TT&pb>PH4-?R5;$9L~fjG?|J^(EFz002<KwA76N zfC&7P2p}f~5Bq*YIPgH>sb%gB05okEzmN`(5(n@ki_bkXA7c+EALQewjsOygyy=2) z^|pWP>3GxQsdL8W9aaF~0$}QQP5d%9uz{&r4}y;fM-$ai5%F5jjoP8vbhU!>d2kZ$ zxdIB5gF&W*6a(F*=mSGq<w>Q9!<DlM3-{Cg;vqE>w)<L6^KZIzqv^SE@gK$>T3-D% z!tlyZg@i90NbAY|8WOs$qW*MD%DXCfYy9Zc*Q%kOtGanDkXq^#zwTQVC>qZTL;WZI zto%})roLjtEAH3VikXZrNd9${#!hTE?S3T|l50Lv6~tXQuok`JHcO%6JS_?{dYuY{ zXp@gJ)MpN7G$kX|?%4ub8{L$DB%S|8g;DODio&Ak!)xDgo;ck+JKn@(xHwzK8-1`j ztAi9TVqfw0O4``YZC0Cq@x8O9G4ViXi*#ZlGm8+x6beg!YupgHdAf&qpCt#4?~d%= zE7`eCWsbUKbsJurXjXlajA3)pw9Yh&nX@~?YY*tY6HL0<jgEP?L?WET+~0#Hfx)59 zOIQ`OBv7#T9m5u1AnoZkO<f}bRgvG@L#Uu8!nO)ym;qjxFc&n87e?X$#PgPhQ2{94 ztML#OQ~{ABpoKC~S;DaWf?%_dP}IAGd;T)Hnvy`xho;~8yNJ6eNeQzU|GHewUB-VR zfE0kCE_DK2Fe3=dfB3fl);F8<p&_*;-yR6iyE0vtWoS+0=14&5pjfYNzVKt)mV?*U zmX8PNOtY6BFa!IZqc^kK-ZrG%-JoxMA*C)clg+Dl{|=EXL^j-LrI)JmjGVG#rtfpb zf<LloWKI;=Fm67!A$#;nnav0-c-IfA@CV^ofBF%Vaer-F&-!#dHIT*AV%l9x+m2#5 z=4c5UVG#vU@qJOIPkgfrO$`%zsWWcquRm>7a+?{b8C`5Y-s7r8)4+G$V=@d~x2bIc zUrOz6su)Q{RD*|#Z$K^?dj@FK^wt?)2&i>o)||Dgw!kl?y1&@4f}(AtRTxC&)a@r4 z0@L#fo~FJKYhqefLBhX>P%|=0G{l)r>;2<ooztFZgy+4Br3r<kHpR|#qbX}ip<#z| zyWw>ZnMG`50WnWb@zlv%%%tI(IPfIL(XvA*#Rj&h=BJ<nXG)|~R&n-cQM?|wi_oZO z8w3tK4`iZcKIQEu;NCu-yD{6<a+ax(I8<-d;;#PXvbJm1;V2&b#?Ug&W(thxmNYNS z#gFpe0Q=t>LjM;h`)6zJ2Az)acKxcl-3AYlv#Ouu_v^a|m{ERfFr5cfNweZh^6d<d zF7)+U4rmz1=0u_Qn8cA}iIu9KXQbet$;1MoSDJtTAYdr`HSJpx=i|BK$$!L~z9D%G zNpA2e6Jae(XUAC$&csLVVcRiPL51W@MoPiio4Q<c*uBM|`aeN<!EWTq?=*o>0u5M# z@Ene5;q$NK7Ao`<ViWG~`d5ReW0*rZ3Z9ML6}5hcWPnpQ&AC>2YD}|h!6Q8%PP1R2 
zG!_>`{luZxzXD%77blpKqd^!WTZ=m8{W9C9WGj3+-|uY4bFT04HJ@`Kyvv>&lduAU z2s$f9QS1Cm2KP5Dr|2l7(n>yxnHJCNm~6MMUoA0sOV@8VC%w3KGuY8Rcu9$9Zx@lT z7o71DZJ^zliU=j`52ni7<D-ic*i~0eHF)F5(_HjjEP5wOH)5mfZK-1XMN`WDw@ljZ zPRq{PeyrZom2YY2)v4lv@srb`c^&dQFL4V_ufzQ5kgD~?c!7{Ox~6Z7i!#eIXT3!c zf?)?q2%<JUmNo9$rG0mOUvz=ji=>S3zkjfihp(m#2v2aef^s{uwtA!0V{%8K(9n^g z`)h;8Iow-n5h0WPOjh0R#PGn}3DraWJdYax)8m}xrKlF#@NbbegFUd_7S{|}3oEK~ zl_rR-+LHee=D5s16F#ti4<F%c)trK4Yhc?{LUp!K2^Yxgc^o!?>n5stf~)7gVQ^^X zlz->MK+g&IrTY$a1#lY{TM^)4IvmqX!XfqIk`8k@MzZXgQ|;4}sxD`zUC-9c%4<I= z*q8+&UmIepR3(8YMIkA23S#}}n5l9bU;YT?p4@?%+8)&!jN5Vm#NV1&?QKp=j@O<` z3xhYC%}WQskg?C%)6u&w$DPEq@)3EWYnPM6Modq0S;Lx;g<i!WauJt(C0V@Y&bLoL zVyQN&32-H7m?Zslui>cc?&f<JQc~?6YE$O`>%i#Hhhw*|swIu8X`#Oj<@wSnq=%~7 zo-<m6)Nz`CMUf+);cHI&w(jaZM1fGY|KJS0|AQY7e)@=a3JAy0eshsvYqVBb4EC?P zpVB6<({O~jWHz?%^}r8&Qb0V;qCWE5RnSD*M&KJ5P7--*?eDzRS9*|o0D$Y(5~Uej zKJ4mx`L`O;z9sG3xx5;>1nxr0d&37bMob&QIb+PX%GWVLsWll-M4j+rqmonD1hD;X z<^S!`A912Pidzpkit>jrl)Iis_vun@yWd%nVFuo*3WRfmxx8#i-y#DPAftxKbXw14 zY4z9Dee^(?v5xv5&u?CH2|vZhU2&PF2p{f3%zi9-!*|XI4KrHd`i_(`VJWLh&S^)O zJ#q?OdO=5sal-dS{OVxWj1`|!Y<97iZUZM)J>w_i#IAy$cdtm7%NEM1-76*}v4IjS z`=$m=%|{OgFl=ebU^57W{Q>cGssYrCZ+O)-J;X*n$yX#>rxtg0wgzJ<S27kc38;zW z5Zw}bq7?^PH$Z|zQ)s%=a5T!f*G+|2+~frMVc0T^PY$`~rp<2ixMcT-Ir^q8*wg{r zq!fjrWN-V?S)@KvQ(4d8O%GS|v9u1xI#NK1x%T(ts2&PJo8hRxXfU_s=z&pWW0Sgr zX#5s_n>I$1?u3hBxT>dd{9WEvvAHxEEcs4w^|DW2eu;j8RVGs=-n3~#AvoZg=h~&Y z(;@g)Vf7w7K;v)`O9QMmhQ!f_Ui$QU1dolTQ__oVw(l)s+e7>bllJ)<#=4bGGSd$| zxe}c3lI_G|aS&v;;R=I`D7c4Q+jcuEbjyU;N*_ybTi>m)kH^O<Kfx`xVWXt(TiuP3 zxh&VPve0ibs{!A!iC)Mst3JraOg`S8>QMPB9R0Nh7mQb%>?Jl>tj%xk#_WQ%XNx7+ zgLPv8oFt$vZuH@yQR3=UIpF5w2o-y4D)<V+a655h*i{vU^3OARwv-W~x=*35(&q(2 z`5HuP$(#la3np+(ddte9ce)MFW6C^SKtR=rIseklP%8n-5c6=)tKtHQ>DnED_-pVT z)e6x&l<!)ZN?%$ouh%Dm(-8fJ?g!4CcRMf)<b@0|I}OD8B>DyCqs>7nbWP+tPSG>( z^Y@$rG&VEuw~PMmvv*z*ir6(4)N2Z3w!Vb9A~-VAE=&V$x#0vnkwJ%Ba!*4Ft>`$p zA_8M-y;NOjB%aZy8aQxo`^|d!dExYqh`?zy<Sc@2^ZE}fV*cnUgJ?+W+)%24kPrQW zDtXl1AbQUmo<5aWo!*f@klfg!RTIEFoPjITu-P8?P9E`<u$V_EKeqtxxNhZWhV;)* z_e1Ao@O>U7LtiF>Sxr#98z4LGe{mT93#ae@JC*X^FC+Jm1_%$_1BsNkbV#24!K!L+ zumjP`eIR#bpnP~_G+vq(D0KHna>3FI?dp}b7>(c{_n<AQMIB^Ap`5ZP;KK%sSoMpi zz)3mnvC6-kKJuQvJp%^ecM}Q#aLxNDhkiUAWTq6DFGzv^fzw}}+Cy3Z95MQzVSjSt zuTRs&^RkZ-Z_|QTwf~i1!M#PAzPh^9aeuQuzB8@mQ%hz<zG(W~+E!lyC9K;<q?z+A zv<^Bc*e+tO_{_ESlZEytA{8m>hx2c&XJl3hY_N@QM{UVN_&SjBIdJ_yW|{tb)%;UR z>VLnRlR#1%nYh`B@3g2EY9a-a>nqv~GAE5r|6iD~@qLy5py$p8y6}*O`GjPGtfV1Q zL_a$GnIoXZ_gNI=qPUE_ew2cw+~@}4wFN6k;2;GFRz1f1>6zoS-UX#s_+oUVbYbRO zf$cxdFX+MOC#Ixf?-(~jD7V`|=1_t8vW>Yc{t%p=I)mpyVv_S&XLV^mF-XEh(NvH= z>TAVT*^ZCwT(Fcu^Ix)mGQ1a4s}l~gk|0@fNM2Zl9nrBw!Wy@=K4DB|7g0h^1^7o3 yllGgZg0CH-$0=36P`O}C{bv%iVW@pAf-+YEBx&bEe-cPt2VnQ~)hktPpZ^a=hZQ0K literal 0 HcmV?d00001 diff --git a/metadata-service/war/src/main/resources/boot/data_platforms.json b/metadata-service/war/src/main/resources/boot/data_platforms.json index 3c70eda8561b86..0574f3fda40176 100644 --- a/metadata-service/war/src/main/resources/boot/data_platforms.json +++ b/metadata-service/war/src/main/resources/boot/data_platforms.json @@ -574,5 +574,15 @@ "type": "OTHERS", "logoUrl": "/assets/platforms/fivetranlogo.png" } + }, + { + "urn": "urn:li:dataPlatform:csv", + "aspect": { + "datasetNameDelimiter": ".", + "name": "csv", + "displayName": "CSV", + "type": "OTHERS", + "logoUrl": "/assets/platforms/csv-logo.png" + } } ] From 6a1801089116e04333ab20c80183ff73c0b2374c Mon Sep 17 00:00:00 2001 From: Salman-Apptware <101426513+Salman-Apptware@users.noreply.github.com> Date: Mon, 4 Dec 2023 23:46:42 +0530 Subject: [PATCH 19/23] fix(vulns): 
node-fetch forwards secure headers to untrusted sites (#9375) unrelated smoke test failing --- datahub-web-react/package.json | 2 +- datahub-web-react/yarn.lock | 1986 ++++++++++++++------------------ 2 files changed, 893 insertions(+), 1095 deletions(-) diff --git a/datahub-web-react/package.json b/datahub-web-react/package.json index 22c88f9647dc27..c26338ea285fbf 100644 --- a/datahub-web-react/package.json +++ b/datahub-web-react/package.json @@ -124,7 +124,7 @@ }, "devDependencies": { "@babel/plugin-proposal-private-property-in-object": "^7.21.11", - "@graphql-codegen/cli": "1.20.0", + "@graphql-codegen/cli": "^5.0.0", "@graphql-codegen/near-operation-file-preset": "^1.17.13", "@graphql-codegen/typescript-operations": "1.17.13", "@graphql-codegen/typescript-react-apollo": "2.2.1", diff --git a/datahub-web-react/yarn.lock b/datahub-web-react/yarn.lock index d33299a79b13a1..41b542da97550b 100644 --- a/datahub-web-react/yarn.lock +++ b/datahub-web-react/yarn.lock @@ -184,6 +184,13 @@ signedsource "^1.0.0" yargs "^15.3.1" +"@ardatan/sync-fetch@^0.0.1": + version "0.0.1" + resolved "https://registry.yarnpkg.com/@ardatan/sync-fetch/-/sync-fetch-0.0.1.tgz#3385d3feedceb60a896518a1db857ec1e945348f" + integrity sha512-xhlTqH0m31mnsG0tIP4ETgfSB6gXDaYYsUWTrlUV93fFQPI9dd8hE0Ot6MHLCtqgB32hwJAC3YZMWlXZw7AleA== + dependencies: + node-fetch "^2.6.1" + "@babel/code-frame@^7.0.0", "@babel/code-frame@^7.10.4", "@babel/code-frame@^7.12.13", "@babel/code-frame@^7.16.0", "@babel/code-frame@^7.22.5", "@babel/code-frame@^7.8.3": version "7.22.5" resolved "https://registry.yarnpkg.com/@babel/code-frame/-/code-frame-7.22.5.tgz#234d98e1551960604f1246e6475891a570ad5658" @@ -204,6 +211,11 @@ resolved "https://registry.yarnpkg.com/@babel/compat-data/-/compat-data-7.22.5.tgz#b1f6c86a02d85d2dd3368a2b67c09add8cd0c255" integrity sha512-4Jc/YuIaYqKnDDz892kPIledykKg12Aw1PYX5i/TY28anJtacvM1Rrr8wbieB9GfEJwlzqT0hUEao0CxEebiDA== +"@babel/compat-data@^7.22.9": + version "7.23.5" + resolved "https://registry.yarnpkg.com/@babel/compat-data/-/compat-data-7.23.5.tgz#ffb878728bb6bdcb6f4510aa51b1be9afb8cfd98" + integrity sha512-uU27kfDRlhfKl+w1U6vp16IuvSLtjAxdArVXPa9BvLkrr7CYIsxH5adpHObeAGY/41+syctUWOZ140a2Rvkgjw== + "@babel/core@^7.1.0", "@babel/core@^7.11.1", "@babel/core@^7.12.3", "@babel/core@^7.14.0", "@babel/core@^7.16.0", "@babel/core@^7.7.2", "@babel/core@^7.8.0": version "7.22.5" resolved "https://registry.yarnpkg.com/@babel/core/-/core-7.22.5.tgz#d67d9747ecf26ee7ecd3ebae1ee22225fe902a89" @@ -225,6 +237,27 @@ json5 "^2.2.2" semver "^6.3.0" +"@babel/core@^7.22.9": + version "7.23.5" + resolved "https://registry.yarnpkg.com/@babel/core/-/core-7.23.5.tgz#6e23f2acbcb77ad283c5ed141f824fd9f70101c7" + integrity sha512-Cwc2XjUrG4ilcfOw4wBAK+enbdgwAcAJCfGUItPBKR7Mjw4aEfAFYrLxeRp4jWgtNIKn3n2AlBOfwwafl+42/g== + dependencies: + "@ampproject/remapping" "^2.2.0" + "@babel/code-frame" "^7.23.5" + "@babel/generator" "^7.23.5" + "@babel/helper-compilation-targets" "^7.22.15" + "@babel/helper-module-transforms" "^7.23.3" + "@babel/helpers" "^7.23.5" + "@babel/parser" "^7.23.5" + "@babel/template" "^7.22.15" + "@babel/traverse" "^7.23.5" + "@babel/types" "^7.23.5" + convert-source-map "^2.0.0" + debug "^4.1.0" + gensync "^1.0.0-beta.2" + json5 "^2.2.3" + semver "^6.3.1" + "@babel/eslint-parser@^7.16.3": version "7.22.5" resolved "https://registry.yarnpkg.com/@babel/eslint-parser/-/eslint-parser-7.22.5.tgz#fa032503b9e2d188e25b1b95d29e8b8431042d78" @@ -244,7 +277,7 @@ "@jridgewell/trace-mapping" "^0.3.17" jsesc "^2.5.1" 
-"@babel/generator@^7.23.5": +"@babel/generator@^7.18.13", "@babel/generator@^7.23.5": version "7.23.5" resolved "https://registry.yarnpkg.com/@babel/generator/-/generator-7.23.5.tgz#17d0a1ea6b62f351d281350a5f80b87a810c4755" integrity sha512-BPssCHrBD+0YrxviOa3QzpqwhNIXKEtOa2jQrm4FlmkC2apYgRnQcmPWiGZDlGxiNtltnUFolMe8497Esry+jA== @@ -279,6 +312,17 @@ lru-cache "^5.1.1" semver "^6.3.0" +"@babel/helper-compilation-targets@^7.22.15": + version "7.22.15" + resolved "https://registry.yarnpkg.com/@babel/helper-compilation-targets/-/helper-compilation-targets-7.22.15.tgz#0698fc44551a26cf29f18d4662d5bf545a6cfc52" + integrity sha512-y6EEzULok0Qvz8yyLkCvVX+02ic+By2UdOhylwUOvOn9dvYc9mKICJuuU1n1XBI02YWsNsnrY1kc6DVbjcXbtw== + dependencies: + "@babel/compat-data" "^7.22.9" + "@babel/helper-validator-option" "^7.22.15" + browserslist "^4.21.9" + lru-cache "^5.1.1" + semver "^6.3.1" + "@babel/helper-create-class-features-plugin@^7.18.6", "@babel/helper-create-class-features-plugin@^7.21.0", "@babel/helper-create-class-features-plugin@^7.22.5": version "7.22.5" resolved "https://registry.yarnpkg.com/@babel/helper-create-class-features-plugin/-/helper-create-class-features-plugin-7.22.5.tgz#2192a1970ece4685fbff85b48da2c32fcb130b7c" @@ -362,6 +406,13 @@ dependencies: "@babel/types" "^7.22.5" +"@babel/helper-module-imports@^7.22.15": + version "7.22.15" + resolved "https://registry.yarnpkg.com/@babel/helper-module-imports/-/helper-module-imports-7.22.15.tgz#16146307acdc40cc00c3b2c647713076464bdbf0" + integrity sha512-0pYVBnDKZO2fnSPCrgM/6WMc7eS20Fbok+0r88fp+YtWVLZrp4CkafFGIp+W0VKw4a22sgebPT99y+FDNMdP4w== + dependencies: + "@babel/types" "^7.22.15" + "@babel/helper-module-transforms@^7.22.5": version "7.22.5" resolved "https://registry.yarnpkg.com/@babel/helper-module-transforms/-/helper-module-transforms-7.22.5.tgz#0f65daa0716961b6e96b164034e737f60a80d2ef" @@ -376,6 +427,17 @@ "@babel/traverse" "^7.22.5" "@babel/types" "^7.22.5" +"@babel/helper-module-transforms@^7.23.3": + version "7.23.3" + resolved "https://registry.yarnpkg.com/@babel/helper-module-transforms/-/helper-module-transforms-7.23.3.tgz#d7d12c3c5d30af5b3c0fcab2a6d5217773e2d0f1" + integrity sha512-7bBs4ED9OmswdfDzpz4MpWgSrV7FXlc3zIagvLFjS5H+Mk7Snr21vQ6QwrsoCGMfNC4e4LQPdoULEt4ykz0SRQ== + dependencies: + "@babel/helper-environment-visitor" "^7.22.20" + "@babel/helper-module-imports" "^7.22.15" + "@babel/helper-simple-access" "^7.22.5" + "@babel/helper-split-export-declaration" "^7.22.6" + "@babel/helper-validator-identifier" "^7.22.20" + "@babel/helper-optimise-call-expression@^7.22.5": version "7.22.5" resolved "https://registry.yarnpkg.com/@babel/helper-optimise-call-expression/-/helper-optimise-call-expression-7.22.5.tgz#f21531a9ccbff644fdd156b4077c16ff0c3f609e" @@ -448,16 +510,21 @@ resolved "https://registry.yarnpkg.com/@babel/helper-string-parser/-/helper-string-parser-7.23.4.tgz#9478c707febcbbe1ddb38a3d91a2e054ae622d83" integrity sha512-803gmbQdqwdf4olxrX4AJyFBV/RTr3rSmOj0rKwesmzlfhYNDEs+/iOcznzpNWlJlIlTJC2QfPFcHB6DlzdVLQ== -"@babel/helper-validator-identifier@^7.12.11", "@babel/helper-validator-identifier@^7.22.5": - version "7.22.5" - resolved "https://registry.yarnpkg.com/@babel/helper-validator-identifier/-/helper-validator-identifier-7.22.5.tgz#9544ef6a33999343c8740fa51350f30eeaaaf193" - integrity sha512-aJXu+6lErq8ltp+JhkJUfk1MTGyuA4v7f3pA+BJ5HLfNC6nAQ0Cpi9uOquUj8Hehg0aUiHzWQbOVJGao6ztBAQ== - "@babel/helper-validator-identifier@^7.22.20": version "7.22.20" resolved 
"https://registry.yarnpkg.com/@babel/helper-validator-identifier/-/helper-validator-identifier-7.22.20.tgz#c4ae002c61d2879e724581d96665583dbc1dc0e0" integrity sha512-Y4OZ+ytlatR8AI+8KZfKuL5urKp7qey08ha31L8b3BwewJAoJamTzyvxPR/5D+KkdJCGPq/+8TukHBlY10FX9A== +"@babel/helper-validator-identifier@^7.22.5": + version "7.22.5" + resolved "https://registry.yarnpkg.com/@babel/helper-validator-identifier/-/helper-validator-identifier-7.22.5.tgz#9544ef6a33999343c8740fa51350f30eeaaaf193" + integrity sha512-aJXu+6lErq8ltp+JhkJUfk1MTGyuA4v7f3pA+BJ5HLfNC6nAQ0Cpi9uOquUj8Hehg0aUiHzWQbOVJGao6ztBAQ== + +"@babel/helper-validator-option@^7.22.15": + version "7.23.5" + resolved "https://registry.yarnpkg.com/@babel/helper-validator-option/-/helper-validator-option-7.23.5.tgz#907a3fbd4523426285365d1206c423c4c5520307" + integrity sha512-85ttAOMLsr53VgXkTbkx8oA6YTfT4q7/HzXSLEYmjcSTJPMPQtvq1BD79Byep5xMUYbGRzEpDsjUf3dyp54IKw== + "@babel/helper-validator-option@^7.22.5": version "7.22.5" resolved "https://registry.yarnpkg.com/@babel/helper-validator-option/-/helper-validator-option-7.22.5.tgz#de52000a15a177413c8234fa3a8af4ee8102d0ac" @@ -482,6 +549,15 @@ "@babel/traverse" "^7.22.5" "@babel/types" "^7.22.5" +"@babel/helpers@^7.23.5": + version "7.23.5" + resolved "https://registry.yarnpkg.com/@babel/helpers/-/helpers-7.23.5.tgz#52f522840df8f1a848d06ea6a79b79eefa72401e" + integrity sha512-oO7us8FzTEsG3U6ag9MfdF1iA/7Z6dz+MtFhifZk8C8o453rGJFFWUP1t+ULM9TUIAzC9uxXEiXjOiVMyd7QPg== + dependencies: + "@babel/template" "^7.22.15" + "@babel/traverse" "^7.23.5" + "@babel/types" "^7.23.5" + "@babel/highlight@^7.22.5": version "7.22.5" resolved "https://registry.yarnpkg.com/@babel/highlight/-/highlight-7.22.5.tgz#aa6c05c5407a67ebce408162b7ede789b4d22031" @@ -500,17 +576,12 @@ chalk "^2.4.2" js-tokens "^4.0.0" -"@babel/parser@7.12.16": - version "7.12.16" - resolved "https://registry.yarnpkg.com/@babel/parser/-/parser-7.12.16.tgz#cc31257419d2c3189d394081635703f549fc1ed4" - integrity sha512-c/+u9cqV6F0+4Hpq01jnJO+GLp2DdT63ppz9Xa+6cHaajM9VFzK/iDXiKK65YtpeVwu+ctfS6iqlMqRgQRzeCw== - "@babel/parser@^7.1.0", "@babel/parser@^7.1.6", "@babel/parser@^7.14.0", "@babel/parser@^7.14.7", "@babel/parser@^7.20.7", "@babel/parser@^7.22.5": version "7.22.5" resolved "https://registry.yarnpkg.com/@babel/parser/-/parser-7.22.5.tgz#721fd042f3ce1896238cf1b341c77eb7dee7dbea" integrity sha512-DFZMC9LJUG9PLOclRC32G63UXwzqS2koQC8dkx+PLdmt1xSePYpbT/NbsrJy8Q/muXz7o/h/d4A7Fuyixm559Q== -"@babel/parser@^7.22.15", "@babel/parser@^7.23.5": +"@babel/parser@^7.16.8", "@babel/parser@^7.22.15", "@babel/parser@^7.23.5": version "7.23.5" resolved "https://registry.yarnpkg.com/@babel/parser/-/parser-7.23.5.tgz#37dee97c4752af148e1d38c34b856b2507660563" integrity sha512-hOOqoiNXrmGdFbhgCzu6GiURxUgM27Xwd/aPuu8RfHEZPBzL1Z54okAHAQjXfcQNwvrlkAmAp4SlRTZ45vlthQ== @@ -673,6 +744,13 @@ dependencies: "@babel/helper-plugin-utils" "^7.22.5" +"@babel/plugin-syntax-import-assertions@^7.20.0": + version "7.23.3" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-import-assertions/-/plugin-syntax-import-assertions-7.23.3.tgz#9c05a7f592982aff1a2768260ad84bcd3f0c77fc" + integrity sha512-lPgDSU+SJLK3xmFDTV2ZRQAiM7UuUjGidwBywFavObCiZc1BeAAcMtHJKUya92hPHO+at63JJPLygilZard8jw== + dependencies: + "@babel/helper-plugin-utils" "^7.22.5" + "@babel/plugin-syntax-import-assertions@^7.22.5": version "7.22.5" resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-import-assertions/-/plugin-syntax-import-assertions-7.22.5.tgz#07d252e2aa0bc6125567f742cd58619cb14dce98" @@ -1384,7 +1462,7 
@@ dependencies: regenerator-runtime "^0.13.11" -"@babel/template@^7.22.15": +"@babel/template@^7.18.10", "@babel/template@^7.22.15": version "7.22.15" resolved "https://registry.yarnpkg.com/@babel/template/-/template-7.22.15.tgz#09576efc3830f0430f4548ef971dde1350ef2f38" integrity sha512-QPErUVm4uyJa60rkI73qneDacvdvzxshT3kksGqlGWYdOTIUOwJ7RDUL8sGqslY1uXWSL6xMFKEXDS3ox2uF0w== @@ -1402,7 +1480,7 @@ "@babel/parser" "^7.22.5" "@babel/types" "^7.22.5" -"@babel/traverse@7.12.13", "@babel/traverse@>=7.23.2", "@babel/traverse@^7.1.6", "@babel/traverse@^7.14.0", "@babel/traverse@^7.22.5", "@babel/traverse@^7.4.5", "@babel/traverse@^7.7.2": +"@babel/traverse@>=7.23.2", "@babel/traverse@^7.1.6", "@babel/traverse@^7.14.0", "@babel/traverse@^7.16.8", "@babel/traverse@^7.22.5", "@babel/traverse@^7.23.5", "@babel/traverse@^7.4.5", "@babel/traverse@^7.7.2": version "7.23.5" resolved "https://registry.yarnpkg.com/@babel/traverse/-/traverse-7.23.5.tgz#f546bf9aba9ef2b042c0e00d245990c15508e7ec" integrity sha512-czx7Xy5a6sapWWRx61m1Ke1Ra4vczu1mCTtJam5zRTBOonfdJ+S/B6HYmGYu3fJtr8GGET3si6IhgWVBhJ/m8w== @@ -1418,15 +1496,6 @@ debug "^4.1.0" globals "^11.1.0" -"@babel/types@7.12.13": - version "7.12.13" - resolved "https://registry.yarnpkg.com/@babel/types/-/types-7.12.13.tgz#8be1aa8f2c876da11a9cf650c0ecf656913ad611" - integrity sha512-oKrdZTld2im1z8bDwTOQvUbxKwE+854zc16qWZQlcTqMN00pWxHQ4ZeOq0yDMnisOpRykH2/5Qqcrk/OlbAjiQ== - dependencies: - "@babel/helper-validator-identifier" "^7.12.11" - lodash "^4.17.19" - to-fast-properties "^2.0.0" - "@babel/types@^7.0.0", "@babel/types@^7.1.6", "@babel/types@^7.12.6", "@babel/types@^7.20.7", "@babel/types@^7.22.5", "@babel/types@^7.3.0", "@babel/types@^7.3.3", "@babel/types@^7.4.4": version "7.22.5" resolved "https://registry.yarnpkg.com/@babel/types/-/types-7.22.5.tgz#cd93eeaab025880a3a47ec881f4b096a5b786fbe" @@ -1436,7 +1505,7 @@ "@babel/helper-validator-identifier" "^7.22.5" to-fast-properties "^2.0.0" -"@babel/types@^7.22.15", "@babel/types@^7.23.0", "@babel/types@^7.23.5": +"@babel/types@^7.16.8", "@babel/types@^7.18.13", "@babel/types@^7.22.15", "@babel/types@^7.23.0", "@babel/types@^7.23.5": version "7.23.5" resolved "https://registry.yarnpkg.com/@babel/types/-/types-7.23.5.tgz#48d730a00c95109fa4393352705954d74fb5b602" integrity sha512-ON5kSOJwVO6xXVRTvOI0eOnWe7VdUcIpsovGo9U/Br4Ie4UVFQTboO2cYnDhAGU6Fp+UxSiT+pMft0SMHfuq6w== @@ -1727,16 +1796,6 @@ resolved "https://registry.yarnpkg.com/@emotion/weak-memoize/-/weak-memoize-0.3.0.tgz#ea89004119dc42db2e1dba0f97d553f7372f6fcb" integrity sha512-AHPmaAx+RYfZz0eYu6Gviiagpmiyw98ySSlQvCUhVGDRtDFe4DBS0x1bSjdF3gqUDYOczB+yYvBTtEylYSdRhg== -"@endemolshinegroup/cosmiconfig-typescript-loader@3.0.2": - version "3.0.2" - resolved "https://registry.yarnpkg.com/@endemolshinegroup/cosmiconfig-typescript-loader/-/cosmiconfig-typescript-loader-3.0.2.tgz#eea4635828dde372838b0909693ebd9aafeec22d" - integrity sha512-QRVtqJuS1mcT56oHpVegkKBlgtWjXw/gHNWO3eL9oyB5Sc7HBoc2OLG/nYpVfT/Jejvo3NUrD0Udk7XgoyDKkA== - dependencies: - lodash.get "^4" - make-error "^1" - ts-node "^9" - tslib "^2" - "@eslint-community/eslint-utils@^4.2.0": version "4.4.0" resolved "https://registry.yarnpkg.com/@eslint-community/eslint-utils/-/eslint-utils-4.4.0.tgz#a23514e8fb9af1269d5f7788aa556798d61c6b59" @@ -1777,66 +1836,55 @@ "@graphql-codegen/plugin-helpers" "^1.18.2" tslib "~2.0.1" -"@graphql-codegen/cli@1.20.0": - version "1.20.0" - resolved "https://registry.yarnpkg.com/@graphql-codegen/cli/-/cli-1.20.0.tgz#e1bb62fce07caaf1395ca6e94ffc0f2ba1f57938" - integrity 
sha512-5pLtZoaqEmEui6PR7IArmD23VLD3++UQby6iNe4NFG4eMcRai2raIM0E4a/MSn7SjyfSRguekYMMC5JKS1VgQw== - dependencies: - "@graphql-codegen/core" "1.17.9" - "@graphql-codegen/plugin-helpers" "^1.18.2" - "@graphql-tools/apollo-engine-loader" "^6" - "@graphql-tools/code-file-loader" "^6" - "@graphql-tools/git-loader" "^6" - "@graphql-tools/github-loader" "^6" - "@graphql-tools/graphql-file-loader" "^6" - "@graphql-tools/json-file-loader" "^6" - "@graphql-tools/load" "^6" - "@graphql-tools/prisma-loader" "^6" - "@graphql-tools/url-loader" "^6" - "@graphql-tools/utils" "^7.0.0" - ansi-escapes "^4.3.1" - camel-case "^4.1.2" +"@graphql-codegen/cli@^5.0.0": + version "5.0.0" + resolved "https://registry.yarnpkg.com/@graphql-codegen/cli/-/cli-5.0.0.tgz#761dcf08cfee88bbdd9cdf8097b2343445ec6f0a" + integrity sha512-A7J7+be/a6e+/ul2KI5sfJlpoqeqwX8EzktaKCeduyVKgOLA6W5t+NUGf6QumBDXU8PEOqXk3o3F+RAwCWOiqA== + dependencies: + "@babel/generator" "^7.18.13" + "@babel/template" "^7.18.10" + "@babel/types" "^7.18.13" + "@graphql-codegen/core" "^4.0.0" + "@graphql-codegen/plugin-helpers" "^5.0.1" + "@graphql-tools/apollo-engine-loader" "^8.0.0" + "@graphql-tools/code-file-loader" "^8.0.0" + "@graphql-tools/git-loader" "^8.0.0" + "@graphql-tools/github-loader" "^8.0.0" + "@graphql-tools/graphql-file-loader" "^8.0.0" + "@graphql-tools/json-file-loader" "^8.0.0" + "@graphql-tools/load" "^8.0.0" + "@graphql-tools/prisma-loader" "^8.0.0" + "@graphql-tools/url-loader" "^8.0.0" + "@graphql-tools/utils" "^10.0.0" + "@whatwg-node/fetch" "^0.8.0" chalk "^4.1.0" - chokidar "^3.4.3" - common-tags "^1.8.0" - constant-case "^3.0.3" - cosmiconfig "^7.0.0" + cosmiconfig "^8.1.3" debounce "^1.2.0" - dependency-graph "^0.9.0" detect-indent "^6.0.0" - glob "^7.1.6" - graphql-config "^3.2.0" - indent-string "^4.0.0" - inquirer "^7.3.3" + graphql-config "^5.0.2" + inquirer "^8.0.0" is-glob "^4.0.1" + jiti "^1.17.1" json-to-pretty-yaml "^1.2.2" - latest-version "5.1.0" - listr "^0.14.3" - listr-update-renderer "^0.5.0" + listr2 "^4.0.5" log-symbols "^4.0.0" - lower-case "^2.0.1" - minimatch "^3.0.4" - mkdirp "^1.0.4" - pascal-case "^3.1.1" - request "^2.88.2" + micromatch "^4.0.5" + shell-quote "^1.7.3" string-env-interpolation "^1.0.1" ts-log "^2.2.3" - tslib "~2.0.1" - upper-case "^2.0.2" - valid-url "^1.0.9" - wrap-ansi "^7.0.0" - yaml "^1.10.0" - yargs "^16.1.1" + tslib "^2.4.0" + yaml "^2.3.1" + yargs "^17.0.0" -"@graphql-codegen/core@1.17.9": - version "1.17.9" - resolved "https://registry.yarnpkg.com/@graphql-codegen/core/-/core-1.17.9.tgz#c03e71018ff04d26f5139a2d90a32b31d3bb2b43" - integrity sha512-7nwy+bMWqb0iYJ2DKxA9UiE16meeJ2Ch2XWS/N/ZnA0snTR+GZ20USI8z6YqP1Fuist7LvGO1MbitO2qBT8raA== +"@graphql-codegen/core@^4.0.0": + version "4.0.0" + resolved "https://registry.yarnpkg.com/@graphql-codegen/core/-/core-4.0.0.tgz#b29c911746a532a675e33720acb4eb2119823e01" + integrity sha512-JAGRn49lEtSsZVxeIlFVIRxts2lWObR+OQo7V2LHDJ7ohYYw3ilv7nJ8pf8P4GTg/w6ptcYdSdVVdkI8kUHB/Q== dependencies: - "@graphql-codegen/plugin-helpers" "^1.18.2" - "@graphql-tools/merge" "^6" - "@graphql-tools/utils" "^6" - tslib "~2.0.1" + "@graphql-codegen/plugin-helpers" "^5.0.0" + "@graphql-tools/schema" "^10.0.0" + "@graphql-tools/utils" "^10.0.0" + tslib "~2.5.0" "@graphql-codegen/fragment-matcher@^5.0.0": version "5.0.0" @@ -1868,7 +1916,7 @@ lodash "~4.17.0" tslib "~2.3.0" -"@graphql-codegen/plugin-helpers@^5.0.0": +"@graphql-codegen/plugin-helpers@^5.0.0", "@graphql-codegen/plugin-helpers@^5.0.1": version "5.0.1" resolved 
"https://registry.yarnpkg.com/@graphql-codegen/plugin-helpers/-/plugin-helpers-5.0.1.tgz#e2429fcfba3f078d5aa18aa062d46c922bbb0d55" integrity sha512-6L5sb9D8wptZhnhLLBcheSPU7Tg//DGWgc5tQBWX46KYTOTQHGqDpv50FxAJJOyFVJrveN9otWk9UT9/yfY4ww== @@ -1929,125 +1977,181 @@ parse-filepath "^1.0.2" tslib "~2.3.0" -"@graphql-tools/apollo-engine-loader@^6": - version "6.2.5" - resolved "https://registry.yarnpkg.com/@graphql-tools/apollo-engine-loader/-/apollo-engine-loader-6.2.5.tgz#b9e65744f522bb9f6ca50651e5622820c4f059a8" - integrity sha512-CE4uef6PyxtSG+7OnLklIr2BZZDgjO89ZXK47EKdY7jQy/BQD/9o+8SxPsgiBc+2NsDJH2I6P/nqoaJMOEat6g== +"@graphql-tools/apollo-engine-loader@^8.0.0": + version "8.0.0" + resolved "https://registry.yarnpkg.com/@graphql-tools/apollo-engine-loader/-/apollo-engine-loader-8.0.0.tgz#ac1f351cbe41508411784f25757f5557b0f27489" + integrity sha512-axQTbN5+Yxs1rJ6cWQBOfw3AEeC+fvIuZSfJLPLLvFJLj4pUm9fhxey/g6oQZAAQJqKPfw+tLDUQvnfvRK8Kmg== dependencies: - "@graphql-tools/utils" "^7.0.0" - cross-fetch "3.0.6" - tslib "~2.0.1" + "@ardatan/sync-fetch" "^0.0.1" + "@graphql-tools/utils" "^10.0.0" + "@whatwg-node/fetch" "^0.9.0" + tslib "^2.4.0" -"@graphql-tools/batch-execute@^7.1.2": - version "7.1.2" - resolved "https://registry.yarnpkg.com/@graphql-tools/batch-execute/-/batch-execute-7.1.2.tgz#35ba09a1e0f80f34f1ce111d23c40f039d4403a0" - integrity sha512-IuR2SB2MnC2ztA/XeTMTfWcA0Wy7ZH5u+nDkDNLAdX+AaSyDnsQS35sCmHqG0VOGTl7rzoyBWLCKGwSJplgtwg== +"@graphql-tools/batch-execute@^9.0.1": + version "9.0.2" + resolved "https://registry.yarnpkg.com/@graphql-tools/batch-execute/-/batch-execute-9.0.2.tgz#5ac3257501e7941fad40661bb5e1110d6312f58b" + integrity sha512-Y2uwdZI6ZnatopD/SYfZ1eGuQFI7OU2KGZ2/B/7G9ISmgMl5K+ZZWz/PfIEXeiHirIDhyk54s4uka5rj2xwKqQ== dependencies: - "@graphql-tools/utils" "^7.7.0" - dataloader "2.0.0" - tslib "~2.2.0" - value-or-promise "1.0.6" + "@graphql-tools/utils" "^10.0.5" + dataloader "^2.2.2" + tslib "^2.4.0" + value-or-promise "^1.0.12" -"@graphql-tools/code-file-loader@^6": - version "6.3.1" - resolved "https://registry.yarnpkg.com/@graphql-tools/code-file-loader/-/code-file-loader-6.3.1.tgz#42dfd4db5b968acdb453382f172ec684fa0c34ed" - integrity sha512-ZJimcm2ig+avgsEOWWVvAaxZrXXhiiSZyYYOJi0hk9wh5BxZcLUNKkTp6EFnZE/jmGUwuos3pIjUD3Hwi3Bwhg== +"@graphql-tools/code-file-loader@^8.0.0": + version "8.0.3" + resolved "https://registry.yarnpkg.com/@graphql-tools/code-file-loader/-/code-file-loader-8.0.3.tgz#8e1e8c2fc05c94614ce25c3cee36b3b4ec08bb64" + integrity sha512-gVnnlWs0Ua+5FkuHHEriFUOI3OIbHv6DS1utxf28n6NkfGMJldC4j0xlJRY0LS6dWK34IGYgD4HelKYz2l8KiA== dependencies: - "@graphql-tools/graphql-tag-pluck" "^6.5.1" - "@graphql-tools/utils" "^7.0.0" - tslib "~2.1.0" + "@graphql-tools/graphql-tag-pluck" "8.1.0" + "@graphql-tools/utils" "^10.0.0" + globby "^11.0.3" + tslib "^2.4.0" + unixify "^1.0.0" -"@graphql-tools/delegate@^7.0.1", "@graphql-tools/delegate@^7.1.5": - version "7.1.5" - resolved "https://registry.yarnpkg.com/@graphql-tools/delegate/-/delegate-7.1.5.tgz#0b027819b7047eff29bacbd5032e34a3d64bd093" - integrity sha512-bQu+hDd37e+FZ0CQGEEczmRSfQRnnXeUxI/0miDV+NV/zCbEdIJj5tYFNrKT03W6wgdqx8U06d8L23LxvGri/g== +"@graphql-tools/delegate@^10.0.0", "@graphql-tools/delegate@^10.0.3": + version "10.0.3" + resolved "https://registry.yarnpkg.com/@graphql-tools/delegate/-/delegate-10.0.3.tgz#2d0e133da94ca92c24e0c7360414e5592321cf2d" + integrity sha512-Jor9oazZ07zuWkykD3OOhT/2XD74Zm6Ar0ENZMk75MDD51wB2UWUIMljtHxbJhV5A6UBC2v8x6iY0xdCGiIlyw== dependencies: - "@ardatan/aggregate-error" "0.0.6" - 
"@graphql-tools/batch-execute" "^7.1.2" - "@graphql-tools/schema" "^7.1.5" - "@graphql-tools/utils" "^7.7.1" - dataloader "2.0.0" - tslib "~2.2.0" - value-or-promise "1.0.6" + "@graphql-tools/batch-execute" "^9.0.1" + "@graphql-tools/executor" "^1.0.0" + "@graphql-tools/schema" "^10.0.0" + "@graphql-tools/utils" "^10.0.5" + dataloader "^2.2.2" + tslib "^2.5.0" + +"@graphql-tools/executor-graphql-ws@^1.0.0": + version "1.1.0" + resolved "https://registry.yarnpkg.com/@graphql-tools/executor-graphql-ws/-/executor-graphql-ws-1.1.0.tgz#7727159ebaa9df4dc793d0d02e74dd1ca4a7cc60" + integrity sha512-yM67SzwE8rYRpm4z4AuGtABlOp9mXXVy6sxXnTJRoYIdZrmDbKVfIY+CpZUJCqS0FX3xf2+GoHlsj7Qswaxgcg== + dependencies: + "@graphql-tools/utils" "^10.0.2" + "@types/ws" "^8.0.0" + graphql-ws "^5.14.0" + isomorphic-ws "^5.0.0" + tslib "^2.4.0" + ws "^8.13.0" -"@graphql-tools/git-loader@^6": - version "6.2.6" - resolved "https://registry.yarnpkg.com/@graphql-tools/git-loader/-/git-loader-6.2.6.tgz#c2226f4b8f51f1c05c9ab2649ba32d49c68cd077" - integrity sha512-ooQTt2CaG47vEYPP3CPD+nbA0F+FYQXfzrB1Y1ABN9K3d3O2RK3g8qwslzZaI8VJQthvKwt0A95ZeE4XxteYfw== +"@graphql-tools/executor-http@^1.0.0": + version "1.0.4" + resolved "https://registry.yarnpkg.com/@graphql-tools/executor-http/-/executor-http-1.0.4.tgz#d4b3b32430c24b0167760d3b6ffb91846a3b6956" + integrity sha512-lSoPFWrGU6XT9nGGBogUI8bSOtP0yce2FhXTrU5akMZ35BDCNWbkmgryzRhxoAH/yDOaZtKkHQB3xrYX3uo5zA== + dependencies: + "@graphql-tools/utils" "^10.0.2" + "@repeaterjs/repeater" "^3.0.4" + "@whatwg-node/fetch" "^0.9.0" + extract-files "^11.0.0" + meros "^1.2.1" + tslib "^2.4.0" + value-or-promise "^1.0.12" + +"@graphql-tools/executor-legacy-ws@^1.0.0": + version "1.0.4" + resolved "https://registry.yarnpkg.com/@graphql-tools/executor-legacy-ws/-/executor-legacy-ws-1.0.4.tgz#27fcccba782daf605d4cf34ffa85a675f43c33f6" + integrity sha512-b7aGuRekZDS+m3af3BIvMKxu15bmVPMt5eGQVuP2v5pxmbaPTh+iv5mx9b3Plt32z5Ke5tycBnNm5urSFtW8ng== dependencies: - "@graphql-tools/graphql-tag-pluck" "^6.2.6" - "@graphql-tools/utils" "^7.0.0" - tslib "~2.1.0" + "@graphql-tools/utils" "^10.0.0" + "@types/ws" "^8.0.0" + isomorphic-ws "5.0.0" + tslib "^2.4.0" + ws "8.14.2" -"@graphql-tools/github-loader@^6": - version "6.2.5" - resolved "https://registry.yarnpkg.com/@graphql-tools/github-loader/-/github-loader-6.2.5.tgz#460dff6f5bbaa26957a5ea3be4f452b89cc6a44b" - integrity sha512-DLuQmYeNNdPo8oWus8EePxWCfCAyUXPZ/p1PWqjrX/NGPyH2ZObdqtDAfRHztljt0F/qkBHbGHCEk2TKbRZTRw== +"@graphql-tools/executor@^1.0.0": + version "1.2.0" + resolved "https://registry.yarnpkg.com/@graphql-tools/executor/-/executor-1.2.0.tgz#6c45f4add765769d9820c4c4405b76957ba39c79" + integrity sha512-SKlIcMA71Dha5JnEWlw4XxcaJ+YupuXg0QCZgl2TOLFz4SkGCwU/geAsJvUJFwK2RbVLpQv/UMq67lOaBuwDtg== dependencies: - "@graphql-tools/graphql-tag-pluck" "^6.2.6" - "@graphql-tools/utils" "^7.0.0" - cross-fetch "3.0.6" - tslib "~2.0.1" + "@graphql-tools/utils" "^10.0.0" + "@graphql-typed-document-node/core" "3.2.0" + "@repeaterjs/repeater" "^3.0.4" + tslib "^2.4.0" + value-or-promise "^1.0.12" -"@graphql-tools/graphql-file-loader@^6", "@graphql-tools/graphql-file-loader@^6.0.0": - version "6.2.7" - resolved "https://registry.yarnpkg.com/@graphql-tools/graphql-file-loader/-/graphql-file-loader-6.2.7.tgz#d3720f2c4f4bb90eb2a03a7869a780c61945e143" - integrity sha512-5k2SNz0W87tDcymhEMZMkd6/vs6QawDyjQXWtqkuLTBF3vxjxPD1I4dwHoxgWPIjjANhXybvulD7E+St/7s9TQ== +"@graphql-tools/git-loader@^8.0.0": + version "8.0.3" + resolved 
"https://registry.yarnpkg.com/@graphql-tools/git-loader/-/git-loader-8.0.3.tgz#a86d352b23a646c28d27282fef7694b846b31c44" + integrity sha512-Iz9KbRUAkuOe8JGTS0qssyJ+D5Snle17W+z9anwWrLFrkBhHrRFUy5AdjZqgJuhls0x30QkZBnnCtnHDBdQ4nA== dependencies: - "@graphql-tools/import" "^6.2.6" - "@graphql-tools/utils" "^7.0.0" - tslib "~2.1.0" + "@graphql-tools/graphql-tag-pluck" "8.1.0" + "@graphql-tools/utils" "^10.0.0" + is-glob "4.0.3" + micromatch "^4.0.4" + tslib "^2.4.0" + unixify "^1.0.0" -"@graphql-tools/graphql-tag-pluck@^6.2.6", "@graphql-tools/graphql-tag-pluck@^6.5.1": - version "6.5.1" - resolved "https://registry.yarnpkg.com/@graphql-tools/graphql-tag-pluck/-/graphql-tag-pluck-6.5.1.tgz#5fb227dbb1e19f4b037792b50f646f16a2d4c686" - integrity sha512-7qkm82iFmcpb8M6/yRgzjShtW6Qu2OlCSZp8uatA3J0eMl87TxyJoUmL3M3UMMOSundAK8GmoyNVFUrueueV5Q== +"@graphql-tools/github-loader@^8.0.0": + version "8.0.0" + resolved "https://registry.yarnpkg.com/@graphql-tools/github-loader/-/github-loader-8.0.0.tgz#683195800618364701cfea9bc6f88674486f053b" + integrity sha512-VuroArWKcG4yaOWzV0r19ElVIV6iH6UKDQn1MXemND0xu5TzrFme0kf3U9o0YwNo0kUYEk9CyFM0BYg4he17FA== dependencies: - "@babel/parser" "7.12.16" - "@babel/traverse" "7.12.13" - "@babel/types" "7.12.13" - "@graphql-tools/utils" "^7.0.0" - tslib "~2.1.0" + "@ardatan/sync-fetch" "^0.0.1" + "@graphql-tools/executor-http" "^1.0.0" + "@graphql-tools/graphql-tag-pluck" "^8.0.0" + "@graphql-tools/utils" "^10.0.0" + "@whatwg-node/fetch" "^0.9.0" + tslib "^2.4.0" + value-or-promise "^1.0.12" -"@graphql-tools/import@^6.2.6": - version "6.3.1" - resolved "https://registry.yarnpkg.com/@graphql-tools/import/-/import-6.3.1.tgz#731c47ab6c6ac9f7994d75c76b6c2fa127d2d483" - integrity sha512-1szR19JI6WPibjYurMLdadHKZoG9C//8I/FZ0Dt4vJSbrMdVNp8WFxg4QnZrDeMG4MzZc90etsyF5ofKjcC+jw== +"@graphql-tools/graphql-file-loader@^8.0.0": + version "8.0.0" + resolved "https://registry.yarnpkg.com/@graphql-tools/graphql-file-loader/-/graphql-file-loader-8.0.0.tgz#a2026405bce86d974000455647511bf65df4f211" + integrity sha512-wRXj9Z1IFL3+zJG1HWEY0S4TXal7+s1vVhbZva96MSp0kbb/3JBF7j0cnJ44Eq0ClccMgGCDFqPFXty4JlpaPg== dependencies: + "@graphql-tools/import" "7.0.0" + "@graphql-tools/utils" "^10.0.0" + globby "^11.0.3" + tslib "^2.4.0" + unixify "^1.0.0" + +"@graphql-tools/graphql-tag-pluck@8.1.0", "@graphql-tools/graphql-tag-pluck@^8.0.0": + version "8.1.0" + resolved "https://registry.yarnpkg.com/@graphql-tools/graphql-tag-pluck/-/graphql-tag-pluck-8.1.0.tgz#0745b6f0103eb725f10c5d4c1a9438670bb8e05b" + integrity sha512-kt5l6H/7QxQcIaewInTcune6NpATojdFEW98/8xWcgmy7dgXx5vU9e0AicFZIH+ewGyZzTpwFqO2RI03roxj2w== + dependencies: + "@babel/core" "^7.22.9" + "@babel/parser" "^7.16.8" + "@babel/plugin-syntax-import-assertions" "^7.20.0" + "@babel/traverse" "^7.16.8" + "@babel/types" "^7.16.8" + "@graphql-tools/utils" "^10.0.0" + tslib "^2.4.0" + +"@graphql-tools/import@7.0.0": + version "7.0.0" + resolved "https://registry.yarnpkg.com/@graphql-tools/import/-/import-7.0.0.tgz#a6a91a90a707d5f46bad0fd3fde2f407b548b2be" + integrity sha512-NVZiTO8o1GZs6OXzNfjB+5CtQtqsZZpQOq+Uu0w57kdUkT4RlQKlwhT8T81arEsbV55KpzkpFsOZP7J1wdmhBw== + dependencies: + "@graphql-tools/utils" "^10.0.0" resolve-from "5.0.0" - tslib "~2.2.0" + tslib "^2.4.0" -"@graphql-tools/json-file-loader@^6", "@graphql-tools/json-file-loader@^6.0.0": - version "6.2.6" - resolved "https://registry.yarnpkg.com/@graphql-tools/json-file-loader/-/json-file-loader-6.2.6.tgz#830482cfd3721a0799cbf2fe5b09959d9332739a" - integrity 
sha512-CnfwBSY5926zyb6fkDBHnlTblHnHI4hoBALFYXnrg0Ev4yWU8B04DZl/pBRUc459VNgO2x8/mxGIZj2hPJG1EA== +"@graphql-tools/json-file-loader@^8.0.0": + version "8.0.0" + resolved "https://registry.yarnpkg.com/@graphql-tools/json-file-loader/-/json-file-loader-8.0.0.tgz#9b1b62902f766ef3f1c9cd1c192813ea4f48109c" + integrity sha512-ki6EF/mobBWJjAAC84xNrFMhNfnUFD6Y0rQMGXekrUgY0NdeYXHU0ZUgHzC9O5+55FslqUmAUHABePDHTyZsLg== dependencies: - "@graphql-tools/utils" "^7.0.0" - tslib "~2.0.1" + "@graphql-tools/utils" "^10.0.0" + globby "^11.0.3" + tslib "^2.4.0" + unixify "^1.0.0" -"@graphql-tools/load@^6", "@graphql-tools/load@^6.0.0": - version "6.2.8" - resolved "https://registry.yarnpkg.com/@graphql-tools/load/-/load-6.2.8.tgz#16900fb6e75e1d075cad8f7ea439b334feb0b96a" - integrity sha512-JpbyXOXd8fJXdBh2ta0Q4w8ia6uK5FHzrTNmcvYBvflFuWly2LDTk2abbSl81zKkzswQMEd2UIYghXELRg8eTA== +"@graphql-tools/load@^8.0.0": + version "8.0.1" + resolved "https://registry.yarnpkg.com/@graphql-tools/load/-/load-8.0.1.tgz#498f2230448601cb87894b8a93df7867daef69ea" + integrity sha512-qSMsKngJhDqRbuWyo3NvakEFqFL6+eSjy8ooJ1o5qYD26N7dqXkKzIMycQsX7rBK19hOuINAUSaRcVWH6hTccw== dependencies: - "@graphql-tools/merge" "^6.2.12" - "@graphql-tools/utils" "^7.5.0" - globby "11.0.3" - import-from "3.0.0" - is-glob "4.0.1" + "@graphql-tools/schema" "^10.0.0" + "@graphql-tools/utils" "^10.0.11" p-limit "3.1.0" - tslib "~2.2.0" - unixify "1.0.0" - valid-url "1.0.9" + tslib "^2.4.0" -"@graphql-tools/merge@^6", "@graphql-tools/merge@^6.0.0", "@graphql-tools/merge@^6.2.12": - version "6.2.14" - resolved "https://registry.yarnpkg.com/@graphql-tools/merge/-/merge-6.2.14.tgz#694e2a2785ba47558e5665687feddd2935e9d94e" - integrity sha512-RWT4Td0ROJai2eR66NHejgf8UwnXJqZxXgDWDI+7hua5vNA2OW8Mf9K1Wav1ZkjWnuRp4ztNtkZGie5ISw55ow== +"@graphql-tools/merge@^9.0.0", "@graphql-tools/merge@^9.0.1": + version "9.0.1" + resolved "https://registry.yarnpkg.com/@graphql-tools/merge/-/merge-9.0.1.tgz#693f15da152339284469b1ce5c6827e3ae350a29" + integrity sha512-hIEExWO9fjA6vzsVjJ3s0cCQ+Q/BEeMVJZtMXd7nbaVefVy0YDyYlEkeoYYNV3NVVvu1G9lr6DM1Qd0DGo9Caw== dependencies: - "@graphql-tools/schema" "^7.0.0" - "@graphql-tools/utils" "^7.7.0" - tslib "~2.2.0" + "@graphql-tools/utils" "^10.0.10" + tslib "^2.4.0" "@graphql-tools/optimize@^1.0.1": version "1.3.1" @@ -2056,31 +2160,28 @@ dependencies: tslib "^2.4.0" -"@graphql-tools/prisma-loader@^6": - version "6.3.0" - resolved "https://registry.yarnpkg.com/@graphql-tools/prisma-loader/-/prisma-loader-6.3.0.tgz#c907e17751ff2b26e7c2bc75d0913ebf03f970da" - integrity sha512-9V3W/kzsFBmUQqOsd96V4a4k7Didz66yh/IK89B1/rrvy9rYj+ULjEqR73x9BYZ+ww9FV8yP8LasWAJwWaqqJQ== +"@graphql-tools/prisma-loader@^8.0.0": + version "8.0.2" + resolved "https://registry.yarnpkg.com/@graphql-tools/prisma-loader/-/prisma-loader-8.0.2.tgz#3a7126ec2389a7aa7846bd0e441629ac5a1934fc" + integrity sha512-8d28bIB0bZ9Bj0UOz9sHagVPW+6AHeqvGljjERtwCnWl8OCQw2c2pNboYXISLYUG5ub76r4lDciLLTU+Ks7Q0w== dependencies: - "@graphql-tools/url-loader" "^6.8.2" - "@graphql-tools/utils" "^7.0.0" - "@types/http-proxy-agent" "^2.0.2" + "@graphql-tools/url-loader" "^8.0.0" + "@graphql-tools/utils" "^10.0.8" "@types/js-yaml" "^4.0.0" "@types/json-stable-stringify" "^1.0.32" - "@types/jsonwebtoken" "^8.5.0" + "@whatwg-node/fetch" "^0.9.0" chalk "^4.1.0" debug "^4.3.1" - dotenv "^8.2.0" - graphql-request "^3.3.0" - http-proxy-agent "^4.0.1" - https-proxy-agent "^5.0.0" - isomorphic-fetch "^3.0.0" + dotenv "^16.0.0" + graphql-request "^6.0.0" + http-proxy-agent "^7.0.0" + https-proxy-agent "^7.0.0" + 
jose "^5.0.0" js-yaml "^4.0.0" json-stable-stringify "^1.0.1" - jsonwebtoken "^8.5.1" lodash "^4.17.20" - replaceall "^0.1.6" scuid "^1.1.0" - tslib "~2.1.0" + tslib "^2.4.0" yaml-ast-parser "^0.0.43" "@graphql-tools/relay-operation-optimizer@^6.3.0": @@ -2092,39 +2193,34 @@ "@graphql-tools/utils" "9.1.3" tslib "^2.4.0" -"@graphql-tools/schema@^7.0.0", "@graphql-tools/schema@^7.1.5": - version "7.1.5" - resolved "https://registry.yarnpkg.com/@graphql-tools/schema/-/schema-7.1.5.tgz#07b24e52b182e736a6b77c829fc48b84d89aa711" - integrity sha512-uyn3HSNSckf4mvQSq0Q07CPaVZMNFCYEVxroApOaw802m9DcZPgf9XVPy/gda5GWj9AhbijfRYVTZQgHnJ4CXA== +"@graphql-tools/schema@^10.0.0": + version "10.0.2" + resolved "https://registry.yarnpkg.com/@graphql-tools/schema/-/schema-10.0.2.tgz#21bc2ee25a65fb4890d2e5f9f22ef1f733aa81da" + integrity sha512-TbPsIZnWyDCLhgPGnDjt4hosiNU2mF/rNtSk5BVaXWnZqvKJ6gzJV4fcHcvhRIwtscDMW2/YTnK6dLVnk8pc4w== dependencies: - "@graphql-tools/utils" "^7.1.2" - tslib "~2.2.0" - value-or-promise "1.0.6" - -"@graphql-tools/url-loader@^6", "@graphql-tools/url-loader@^6.0.0", "@graphql-tools/url-loader@^6.8.2": - version "6.10.1" - resolved "https://registry.yarnpkg.com/@graphql-tools/url-loader/-/url-loader-6.10.1.tgz#dc741e4299e0e7ddf435eba50a1f713b3e763b33" - integrity sha512-DSDrbhQIv7fheQ60pfDpGD256ixUQIR6Hhf9Z5bRjVkXOCvO5XrkwoWLiU7iHL81GB1r0Ba31bf+sl+D4nyyfw== - dependencies: - "@graphql-tools/delegate" "^7.0.1" - "@graphql-tools/utils" "^7.9.0" - "@graphql-tools/wrap" "^7.0.4" - "@microsoft/fetch-event-source" "2.0.1" - "@types/websocket" "1.0.2" - abort-controller "3.0.0" - cross-fetch "3.1.4" - extract-files "9.0.0" - form-data "4.0.0" - graphql-ws "^4.4.1" - is-promise "4.0.0" - isomorphic-ws "4.0.1" - lodash "4.17.21" - meros "1.1.4" - subscriptions-transport-ws "^0.9.18" - sync-fetch "0.3.0" - tslib "~2.2.0" - valid-url "1.0.9" - ws "7.4.5" + "@graphql-tools/merge" "^9.0.1" + "@graphql-tools/utils" "^10.0.10" + tslib "^2.4.0" + value-or-promise "^1.0.12" + +"@graphql-tools/url-loader@^8.0.0": + version "8.0.0" + resolved "https://registry.yarnpkg.com/@graphql-tools/url-loader/-/url-loader-8.0.0.tgz#8d952d5ebb7325e587cb914aaebded3dbd078cf6" + integrity sha512-rPc9oDzMnycvz+X+wrN3PLrhMBQkG4+sd8EzaFN6dypcssiefgWKToXtRKI8HHK68n2xEq1PyrOpkjHFJB+GwA== + dependencies: + "@ardatan/sync-fetch" "^0.0.1" + "@graphql-tools/delegate" "^10.0.0" + "@graphql-tools/executor-graphql-ws" "^1.0.0" + "@graphql-tools/executor-http" "^1.0.0" + "@graphql-tools/executor-legacy-ws" "^1.0.0" + "@graphql-tools/utils" "^10.0.0" + "@graphql-tools/wrap" "^10.0.0" + "@types/ws" "^8.0.0" + "@whatwg-node/fetch" "^0.9.0" + isomorphic-ws "^5.0.0" + tslib "^2.4.0" + value-or-promise "^1.0.11" + ws "^8.12.0" "@graphql-tools/utils@9.1.3": version "9.1.3" @@ -2143,16 +2239,17 @@ dset "^3.1.2" tslib "^2.4.0" -"@graphql-tools/utils@^6": - version "6.2.4" - resolved "https://registry.yarnpkg.com/@graphql-tools/utils/-/utils-6.2.4.tgz#38a2314d2e5e229ad4f78cca44e1199e18d55856" - integrity sha512-ybgZ9EIJE3JMOtTrTd2VcIpTXtDrn2q6eiYkeYMKRVh3K41+LZa6YnR2zKERTXqTWqhobROwLt4BZbw2O3Aeeg== +"@graphql-tools/utils@^10.0.10", "@graphql-tools/utils@^10.0.11", "@graphql-tools/utils@^10.0.2", "@graphql-tools/utils@^10.0.5", "@graphql-tools/utils@^10.0.8": + version "10.0.11" + resolved "https://registry.yarnpkg.com/@graphql-tools/utils/-/utils-10.0.11.tgz#1238fbe37e8d6c662c48ab2477c98269d6fd851a" + integrity sha512-vVjXgKn6zjXIlYBd7yJxCVMYGb5j18gE3hx3Qw3mNsSEsYQXbJbPdlwb7Fc9FogsJei5AaqiQerqH4kAosp1nQ== dependencies: - 
"@ardatan/aggregate-error" "0.0.6" - camel-case "4.1.1" - tslib "~2.0.1" + "@graphql-typed-document-node/core" "^3.1.1" + cross-inspect "1.0.0" + dset "^3.1.2" + tslib "^2.4.0" -"@graphql-tools/utils@^7.0.0", "@graphql-tools/utils@^7.1.2", "@graphql-tools/utils@^7.5.0", "@graphql-tools/utils@^7.7.0", "@graphql-tools/utils@^7.7.1", "@graphql-tools/utils@^7.8.1", "@graphql-tools/utils@^7.9.0", "@graphql-tools/utils@^7.9.1": +"@graphql-tools/utils@^7.9.1": version "7.10.0" resolved "https://registry.yarnpkg.com/@graphql-tools/utils/-/utils-7.10.0.tgz#07a4cb5d1bec1ff1dc1d47a935919ee6abd38699" integrity sha512-d334r6bo9mxdSqZW6zWboEnnOOFRrAPVQJ7LkU8/6grglrbcu6WhwCLzHb90E94JI3TD3ricC3YGbUqIi9Xg0w== @@ -2161,27 +2258,27 @@ camel-case "4.1.2" tslib "~2.2.0" -"@graphql-tools/wrap@^7.0.4": - version "7.0.8" - resolved "https://registry.yarnpkg.com/@graphql-tools/wrap/-/wrap-7.0.8.tgz#ad41e487135ca3ea1ae0ea04bb3f596177fb4f50" - integrity sha512-1NDUymworsOlb53Qfh7fonDi2STvqCtbeE68ntKY9K/Ju/be2ZNxrFSbrBHwnxWcN9PjISNnLcAyJ1L5tCUyhg== +"@graphql-tools/wrap@^10.0.0": + version "10.0.1" + resolved "https://registry.yarnpkg.com/@graphql-tools/wrap/-/wrap-10.0.1.tgz#9e3d27d2723962c26c4377d5d7ab0d3038bf728c" + integrity sha512-Cw6hVrKGM2OKBXeuAGltgy4tzuqQE0Nt7t/uAqnuokSXZhMHXJUb124Bnvxc2gPZn5chfJSDafDe4Cp8ZAVJgg== dependencies: - "@graphql-tools/delegate" "^7.1.5" - "@graphql-tools/schema" "^7.1.5" - "@graphql-tools/utils" "^7.8.1" - tslib "~2.2.0" - value-or-promise "1.0.6" + "@graphql-tools/delegate" "^10.0.3" + "@graphql-tools/schema" "^10.0.0" + "@graphql-tools/utils" "^10.0.0" + tslib "^2.4.0" + value-or-promise "^1.0.12" + +"@graphql-typed-document-node/core@3.2.0", "@graphql-typed-document-node/core@^3.1.1", "@graphql-typed-document-node/core@^3.2.0": + version "3.2.0" + resolved "https://registry.yarnpkg.com/@graphql-typed-document-node/core/-/core-3.2.0.tgz#5f3d96ec6b2354ad6d8a28bf216a1d97b5426861" + integrity sha512-mB9oAsNCm9aM3/SOv4YtBMqZbYj10R7dkq8byBqxGY/ncFwhf2oQzMV+LCRlWoDSEBJ3COiR1yeDvMtsoOsuFQ== "@graphql-typed-document-node/core@^3.0.0": version "3.1.0" resolved "https://registry.yarnpkg.com/@graphql-typed-document-node/core/-/core-3.1.0.tgz#0eee6373e11418bfe0b5638f654df7a4ca6a3950" integrity sha512-wYn6r8zVZyQJ6rQaALBEln5B1pzxb9shV5Ef97kTvn6yVGrqyXVnDqnU24MXnFubR+rZjBY9NWuxX3FB2sTsjg== -"@graphql-typed-document-node/core@^3.1.1": - version "3.2.0" - resolved "https://registry.yarnpkg.com/@graphql-typed-document-node/core/-/core-3.2.0.tgz#5f3d96ec6b2354ad6d8a28bf216a1d97b5426861" - integrity sha512-mB9oAsNCm9aM3/SOv4YtBMqZbYj10R7dkq8byBqxGY/ncFwhf2oQzMV+LCRlWoDSEBJ3COiR1yeDvMtsoOsuFQ== - "@hapi/hoek@^9.0.0": version "9.2.0" resolved "https://registry.npmjs.org/@hapi/hoek/-/hoek-9.2.0.tgz#f3933a44e365864f4dad5db94158106d511e8131" @@ -2213,11 +2310,6 @@ resolved "https://registry.yarnpkg.com/@humanwhocodes/object-schema/-/object-schema-1.2.1.tgz#b520529ec21d8e5945a1851dfd1c32e94e39ff45" integrity sha512-ZnQMnLV4e7hDlUvw8H+U8ASL02SS2Gn6+9Ac3wGGLIe7+je2AeAOxPY+izIPJDfFDb7eDjev0Us8MO1iFRN8hA== -"@iarna/toml@^2.2.5": - version "2.2.5" - resolved "https://registry.yarnpkg.com/@iarna/toml/-/toml-2.2.5.tgz#b32366c89b43c6f8cefbdefac778b9c828e3ba8c" - integrity sha512-trnsAYxU3xnS1gPHPyU961coFyLkh4gAD/0zQ5mymY4yOZ+CYvsPqUbOFSw0aDM4y0tV7tiFxL/1XfXPNC6IPg== - "@icons/material@^0.2.4": version "0.2.4" resolved "https://registry.yarnpkg.com/@icons/material/-/material-0.2.4.tgz#e90c9f71768b3736e76d7dd6783fc6c2afa88bc8" @@ -2546,11 +2638,6 @@ refractor "^3.3.1" unist-util-visit "^2.0.3" 
-"@microsoft/fetch-event-source@2.0.1": - version "2.0.1" - resolved "https://registry.yarnpkg.com/@microsoft/fetch-event-source/-/fetch-event-source-2.0.1.tgz#9ceecc94b49fbaa15666e38ae8587f64acce007d" - integrity sha512-W6CLUJ2eBMw3Rec70qrsEW0jOm/3twwJv21mrmj2yORiaVmVYGS4sSS5yUwvQc1ZlDLYGPnClVWmUUMagKNsfA== - "@miragejs/graphql@^0.1.11": version "0.1.12" resolved "https://registry.npmjs.org/@miragejs/graphql/-/graphql-0.1.12.tgz#60679c4ad807fc4a001bc88aba396ba3fa5a958b" @@ -2700,6 +2787,33 @@ dependencies: svgmoji "^3.2.0" +"@peculiar/asn1-schema@^2.3.6": + version "2.3.8" + resolved "https://registry.yarnpkg.com/@peculiar/asn1-schema/-/asn1-schema-2.3.8.tgz#04b38832a814e25731232dd5be883460a156da3b" + integrity sha512-ULB1XqHKx1WBU/tTFIA+uARuRoBVZ4pNdOA878RDrRbBfBGcSzi5HBkdScC6ZbHn8z7L8gmKCgPC1LHRrP46tA== + dependencies: + asn1js "^3.0.5" + pvtsutils "^1.3.5" + tslib "^2.6.2" + +"@peculiar/json-schema@^1.1.12": + version "1.1.12" + resolved "https://registry.yarnpkg.com/@peculiar/json-schema/-/json-schema-1.1.12.tgz#fe61e85259e3b5ba5ad566cb62ca75b3d3cd5339" + integrity sha512-coUfuoMeIB7B8/NMekxaDzLhaYmp0HZNPEjYRm9goRou8UZIC3z21s0sL9AWoCw4EG876QyO3kYrc61WNF9B/w== + dependencies: + tslib "^2.0.0" + +"@peculiar/webcrypto@^1.4.0": + version "1.4.3" + resolved "https://registry.yarnpkg.com/@peculiar/webcrypto/-/webcrypto-1.4.3.tgz#078b3e8f598e847b78683dc3ba65feb5029b93a7" + integrity sha512-VtaY4spKTdN5LjJ04im/d/joXuvLbQdgy5Z4DXF4MFZhQ+MTrejbNMkfZBp1Bs3O5+bFqnJgyGdPuZQflvIa5A== + dependencies: + "@peculiar/asn1-schema" "^2.3.6" + "@peculiar/json-schema" "^1.1.12" + pvtsutils "^1.3.2" + tslib "^2.5.0" + webcrypto-core "^1.7.7" + "@pmmmwh/react-refresh-webpack-plugin@^0.5.3": version "0.5.10" resolved "https://registry.yarnpkg.com/@pmmmwh/react-refresh-webpack-plugin/-/react-refresh-webpack-plugin-0.5.10.tgz#2eba163b8e7dbabb4ce3609ab5e32ab63dda3ef8" @@ -3701,6 +3815,11 @@ dependencies: type-fest "^2.0.0" +"@repeaterjs/repeater@^3.0.4": + version "3.0.5" + resolved "https://registry.yarnpkg.com/@repeaterjs/repeater/-/repeater-3.0.5.tgz#b77571685410217a548a9c753aa3cdfc215bfc78" + integrity sha512-l3YHBLAol6d/IKnB9LhpD0cEZWAoe3eFKUyTYWmFmCO2Q/WOckxLQAUyMZWwZV2M/m3+4vgRoaolFqaII82/TA== + "@rollup/plugin-babel@^5.2.0": version "5.3.1" resolved "https://registry.yarnpkg.com/@rollup/plugin-babel/-/plugin-babel-5.3.1.tgz#04bc0608f4aa4b2e4b1aebf284344d0f68fda283" @@ -3748,13 +3867,6 @@ resolved "https://registry.yarnpkg.com/@rushstack/eslint-patch/-/eslint-patch-1.3.2.tgz#31b9c510d8cada9683549e1dbb4284cca5001faf" integrity sha512-V+MvGwaHH03hYhY+k6Ef/xKd6RYlc4q8WBx+2ANmipHJcKuktNcI/NgEsJgdSUF6Lw32njT6OnrRsKYCdgHjYw== -"@samverschueren/stream-to-observable@^0.3.0": - version "0.3.1" - resolved "https://registry.yarnpkg.com/@samverschueren/stream-to-observable/-/stream-to-observable-0.3.1.tgz#a21117b19ee9be70c379ec1877537ef2e1c63301" - integrity sha512-c/qwwcHyafOQuVQJj0IlBjf5yYgBI7YPJ77k4fOJYesb41jio65eaJODRUmfYKhTOFBrIZ66kgvGPlNbjuoRdQ== - dependencies: - any-observable "^0.3.0" - "@seznam/compose-react-refs@^1.0.6": version "1.0.6" resolved "https://registry.yarnpkg.com/@seznam/compose-react-refs/-/compose-react-refs-1.0.6.tgz#6ec4e70bdd6e32f8e70b4100f27267cf306bd8df" @@ -3782,11 +3894,6 @@ resolved "https://registry.yarnpkg.com/@sinclair/typebox/-/typebox-0.24.51.tgz#645f33fe4e02defe26f2f5c0410e1c094eac7f5f" integrity sha512-1P1OROm/rdubP5aFDSZQILU0vrLCJ4fvHt6EoqHEM+2D/G5MK3bIaymUKLit8Js9gbns5UyJnkP/TZROLw4tUA== -"@sindresorhus/is@^0.14.0": - version "0.14.0" - resolved 
"https://registry.yarnpkg.com/@sindresorhus/is/-/is-0.14.0.tgz#9fb3a3cf3132328151f353de4632e01e52102bea" - integrity sha512-9NET910DNaIPngYnLLPeg+Ogzqsi9uM4mSboU5y6p8S5DzMTVEsJZrawi+BoDNUVBa2DhJqQYUFvMDfgU062LQ== - "@sinonjs/commons@^1.6.0", "@sinonjs/commons@^1.7.0", "@sinonjs/commons@^1.8.3": version "1.8.3" resolved "https://registry.yarnpkg.com/@sinonjs/commons/-/commons-1.8.3.tgz#3802ddd21a50a949b6721ddd72da36e67e7f1b2d" @@ -3979,13 +4086,6 @@ "@svgr/plugin-svgo" "^5.5.0" loader-utils "^2.0.0" -"@szmarczak/http-timer@^1.1.2": - version "1.1.2" - resolved "https://registry.yarnpkg.com/@szmarczak/http-timer/-/http-timer-1.1.2.tgz#b1665e2c461a2cd92f4c1bbf50d5454de0d4b421" - integrity sha512-XIB2XbzHTN6ieIjfIMV9hlVcfPU26s2vafYWQcZHWXHOxiaRZYEDKEwdl129Zyg50+foYV2jCgtrqSA6qNuNSA== - dependencies: - defer-to-connect "^1.0.1" - "@testing-library/dom@^7.28.1": version "7.31.0" resolved "https://registry.yarnpkg.com/@testing-library/dom/-/dom-7.31.0.tgz#938451abd3ca27e1b69bb395d4a40759fd7f5b3b" @@ -4300,13 +4400,6 @@ resolved "https://registry.yarnpkg.com/@types/http-errors/-/http-errors-2.0.1.tgz#20172f9578b225f6c7da63446f56d4ce108d5a65" integrity sha512-/K3ds8TRAfBvi5vfjuz8y6+GiAYBZ0x4tXv1Av6CWBWn0IlADc+ZX9pMq7oU0fNQPnBwIZl3rmeLp6SBApbxSQ== -"@types/http-proxy-agent@^2.0.2": - version "2.0.2" - resolved "https://registry.yarnpkg.com/@types/http-proxy-agent/-/http-proxy-agent-2.0.2.tgz#942c1f35c7e1f0edd1b6ffae5d0f9051cfb32be1" - integrity sha512-2S6IuBRhqUnH1/AUx9k8KWtY3Esg4eqri946MnxTG5HwehF1S5mqLln8fcyMiuQkY72p2gH3W+rIPqp5li0LyQ== - dependencies: - "@types/node" "*" - "@types/http-proxy@^1.17.5", "@types/http-proxy@^1.17.8": version "1.17.11" resolved "https://registry.yarnpkg.com/@types/http-proxy/-/http-proxy-1.17.11.tgz#0ca21949a5588d55ac2b659b69035c84bd5da293" @@ -4366,13 +4459,6 @@ resolved "https://registry.yarnpkg.com/@types/json5/-/json5-0.0.29.tgz#ee28707ae94e11d2b827bcbe5270bcea7f3e71ee" integrity sha1-7ihweulOEdK4J7y+UnC86n8+ce4= -"@types/jsonwebtoken@^8.5.0": - version "8.5.1" - resolved "https://registry.yarnpkg.com/@types/jsonwebtoken/-/jsonwebtoken-8.5.1.tgz#56958cb2d80f6d74352bd2e501a018e2506a8a84" - integrity sha512-rNAPdomlIUX0i0cg2+I+Q1wOUr531zHBQ+cV/28PJ39bSPKjahatZZ2LMuhiguETkCgLVzfruw/ZvNMNkKoSzw== - dependencies: - "@types/node" "*" - "@types/lodash@^4.14.172": version "4.14.195" resolved "https://registry.yarnpkg.com/@types/lodash/-/lodash-4.14.195.tgz#bafc975b252eb6cea78882ce8a7b6bf22a6de632" @@ -4659,10 +4745,10 @@ resolved "https://registry.yarnpkg.com/@types/unist/-/unist-2.0.3.tgz#9c088679876f374eb5983f150d4787aa6fb32d7e" integrity sha512-FvUupuM3rlRsRtCN+fDudtmytGO6iHJuuRKS1Ss0pG5z8oX0diNEw94UEL7hgDbpN94rgaK5R7sWm6RrSkZuAQ== -"@types/websocket@1.0.2": - version "1.0.2" - resolved "https://registry.yarnpkg.com/@types/websocket/-/websocket-1.0.2.tgz#d2855c6a312b7da73ed16ba6781815bf30c6187a" - integrity sha512-B5m9aq7cbbD/5/jThEr33nUY8WEfVi6A2YKCTOvw5Ldy7mtsOkqRvGjnzy6g7iMMDsgu7xREuCzqATLDLQVKcQ== +"@types/ws@^8.0.0": + version "8.5.10" + resolved "https://registry.yarnpkg.com/@types/ws/-/ws-8.5.10.tgz#4acfb517970853fa6574a3a6886791d04a396787" + integrity sha512-vmQSUcfalpIq0R9q7uTo2lXs6eGIpt9wtnLdMv9LVpIjCA/+ufZRozlVoVelIYixx1ugCBKDhn89vnsEGOCx9A== dependencies: "@types/node" "*" @@ -5213,6 +5299,57 @@ "@webassemblyjs/ast" "1.11.6" "@xtuc/long" "4.2.2" +"@whatwg-node/events@^0.0.3": + version "0.0.3" + resolved "https://registry.yarnpkg.com/@whatwg-node/events/-/events-0.0.3.tgz#13a65dd4f5893f55280f766e29ae48074927acad" + integrity 
sha512-IqnKIDWfXBJkvy/k6tzskWTc2NK3LcqHlb+KHGCrjOCH4jfQckRX0NAiIcC/vIqQkzLYw2r2CTSwAxcrtcD6lA== + +"@whatwg-node/events@^0.1.0": + version "0.1.1" + resolved "https://registry.yarnpkg.com/@whatwg-node/events/-/events-0.1.1.tgz#0ca718508249419587e130da26d40e29d99b5356" + integrity sha512-AyQEn5hIPV7Ze+xFoXVU3QTHXVbWPrzaOkxtENMPMuNL6VVHrp4hHfDt9nrQpjO7BgvuM95dMtkycX5M/DZR3w== + +"@whatwg-node/fetch@^0.8.0": + version "0.8.8" + resolved "https://registry.yarnpkg.com/@whatwg-node/fetch/-/fetch-0.8.8.tgz#48c6ad0c6b7951a73e812f09dd22d75e9fa18cae" + integrity sha512-CdcjGC2vdKhc13KKxgsc6/616BQ7ooDIgPeTuAiE8qfCnS0mGzcfCOoZXypQSz73nxI+GWc7ZReIAVhxoE1KCg== + dependencies: + "@peculiar/webcrypto" "^1.4.0" + "@whatwg-node/node-fetch" "^0.3.6" + busboy "^1.6.0" + urlpattern-polyfill "^8.0.0" + web-streams-polyfill "^3.2.1" + +"@whatwg-node/fetch@^0.9.0": + version "0.9.14" + resolved "https://registry.yarnpkg.com/@whatwg-node/fetch/-/fetch-0.9.14.tgz#262039fd8aea52a9c8aac2ec20f316382eae1a3c" + integrity sha512-wurZC82zzZwXRDSW0OS9l141DynaJQh7Yt0FD1xZ8niX7/Et/7RoiLiltbVU1fSF1RR9z6ndEaTUQBAmddTm1w== + dependencies: + "@whatwg-node/node-fetch" "^0.5.0" + urlpattern-polyfill "^9.0.0" + +"@whatwg-node/node-fetch@^0.3.6": + version "0.3.6" + resolved "https://registry.yarnpkg.com/@whatwg-node/node-fetch/-/node-fetch-0.3.6.tgz#e28816955f359916e2d830b68a64493124faa6d0" + integrity sha512-w9wKgDO4C95qnXZRwZTfCmLWqyRnooGjcIwG0wADWjw9/HN0p7dtvtgSvItZtUyNteEvgTrd8QojNEqV6DAGTA== + dependencies: + "@whatwg-node/events" "^0.0.3" + busboy "^1.6.0" + fast-querystring "^1.1.1" + fast-url-parser "^1.1.3" + tslib "^2.3.1" + +"@whatwg-node/node-fetch@^0.5.0": + version "0.5.1" + resolved "https://registry.yarnpkg.com/@whatwg-node/node-fetch/-/node-fetch-0.5.1.tgz#36a2bc31e5fc8cffa17826c192a8829d4c0ccc1e" + integrity sha512-sQz/s3NyyzIZxQ7PHxDFUMM1k4kQQbi2jU8ILdTbt5+S59ME8aI7XF30O9qohRIIYdSrUvm/OwKQmVP1y6e2WQ== + dependencies: + "@whatwg-node/events" "^0.1.0" + busboy "^1.6.0" + fast-querystring "^1.1.1" + fast-url-parser "^1.1.3" + tslib "^2.3.1" + "@wry/context@^0.6.0": version "0.6.0" resolved "https://registry.yarnpkg.com/@wry/context/-/context-0.6.0.tgz#f903eceb89d238ef7e8168ed30f4511f92d83e06" @@ -5263,13 +5400,6 @@ abab@^2.0.3, abab@^2.0.5: resolved "https://registry.yarnpkg.com/abab/-/abab-2.0.5.tgz#c0b678fb32d60fc1219c784d6a826fe385aeb79a" integrity sha512-9IK9EadsbHo6jLWIpxpR6pL0sazTXV6+SQv25ZB+F7Bj9mJNaOc4nCRabwd5M/JwmUa8idz6Eci6eKfJryPs6Q== -abort-controller@3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/abort-controller/-/abort-controller-3.0.0.tgz#eaf54d53b62bae4138e809ca225c8439a6efb392" - integrity sha512-h8lQ8tacZYnR3vNQTgibj+tODHI5/+l06Au2Pcriv/Gmet0eaj4TwWH41sO9wnHDiQsEj19q0drzdWdeAHtweg== - dependencies: - event-target-shim "^5.0.0" - accepts@~1.3.4, accepts@~1.3.5, accepts@~1.3.8: version "1.3.8" resolved "https://registry.yarnpkg.com/accepts/-/accepts-1.3.8.tgz#0bf0be125b67014adcb0b0921e62db7bffe16b2e" @@ -5336,6 +5466,21 @@ agent-base@6: dependencies: debug "4" +agent-base@^7.0.2, agent-base@^7.1.0: + version "7.1.0" + resolved "https://registry.yarnpkg.com/agent-base/-/agent-base-7.1.0.tgz#536802b76bc0b34aa50195eb2442276d613e3434" + integrity sha512-o/zjMZRhJxny7OyEF+Op8X+efiELC7k7yOjMzgfzVqOzXqkBkWI79YoTdOtsuWd5BWhAGAuOY/Xa6xpiaWXiNg== + dependencies: + debug "^4.3.4" + +aggregate-error@^3.0.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/aggregate-error/-/aggregate-error-3.1.0.tgz#92670ff50f5359bdb7a3e0d40d0ec30c5737687a" + integrity 
sha512-4I7Td01quW/RpocfNayFdFVk1qSuoh0E7JrbRJ16nH01HhKFQ88INq9Sd+nd72zqRySlr9BmDA8xlEJ6vJMrYA== + dependencies: + clean-stack "^2.0.0" + indent-string "^4.0.0" + ajv-formats@^2.1.1: version "2.1.1" resolved "https://registry.yarnpkg.com/ajv-formats/-/ajv-formats-2.1.1.tgz#6e669400659eb74973bbf2e33327180a0996b520" @@ -5391,12 +5536,7 @@ analytics@^0.8.9: "@analytics/core" "^0.12.7" "@analytics/storage-utils" "^0.4.2" -ansi-escapes@^3.0.0: - version "3.2.0" - resolved "https://registry.yarnpkg.com/ansi-escapes/-/ansi-escapes-3.2.0.tgz#8780b98ff9dbf5638152d1f1fe5c1d7b4442976b" - integrity sha512-cBhpre4ma+U0T1oM5fXg7Dy1Jw7zzwv7lt/GoCpr+hDQJoYnKVPLL4dCvSEFMmQurOQvSrwT7SL/DAlhBI97RQ== - -ansi-escapes@^4.2.1, ansi-escapes@^4.3.1: +ansi-escapes@^4.2.1, ansi-escapes@^4.3.0, ansi-escapes@^4.3.1: version "4.3.2" resolved "https://registry.yarnpkg.com/ansi-escapes/-/ansi-escapes-4.3.2.tgz#6b2291d1db7d98b6521d5f1efa42d0f3a9feb65e" integrity sha512-gKXj5ALrKWQLsYG9jlTRmR/xKluxHV+Z9QEwNIgCfM1/uwPMCuzVVnh5mwTd+OuBZcwSIMbqssNWRm1lE51QaQ== @@ -5408,16 +5548,11 @@ ansi-html-community@^0.0.8: resolved "https://registry.yarnpkg.com/ansi-html-community/-/ansi-html-community-0.0.8.tgz#69fbc4d6ccbe383f9736934ae34c3f8290f1bf41" integrity sha512-1APHAyr3+PCamwNw3bXCPp4HFLONZt/yIH0sZp0/469KWNTEy+qN5jQ3GVX6DMZ1UXAi34yVwtTeaG/HpBuuzw== -ansi-regex@3.0.1, ansi-regex@^2.0.0, ansi-regex@^3.0.0, ansi-regex@^5.0.0, ansi-regex@^5.0.1, ansi-regex@^6.0.1: +ansi-regex@3.0.1, ansi-regex@^5.0.0, ansi-regex@^5.0.1, ansi-regex@^6.0.1: version "3.0.1" resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-3.0.1.tgz#123d6479e92ad45ad897d4054e3c7ca7db4944e1" integrity sha512-+O9Jct8wf++lXxxFc4hc8LsjaSq0HFzzL7cVsw8pRDIPdjKD2mT4ytDZlLuSBZ4cLKZFXIrMGO7DbQCtMJJMKw== -ansi-styles@^2.2.1: - version "2.2.1" - resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-2.2.1.tgz#b432dd3358b634cf75e1e4664368240533c1ddbe" - integrity sha1-tDLdM1i2NM914eRmQ2gkBTPB3b4= - ansi-styles@^3.2.1: version "3.2.1" resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-3.2.1.tgz#41fbb20243e50b12be0f04b8dedbf07520ce841d" @@ -5486,11 +5621,6 @@ antd@4.24.7: rc-util "^5.22.5" scroll-into-view-if-needed "^2.2.25" -any-observable@^0.3.0: - version "0.3.0" - resolved "https://registry.yarnpkg.com/any-observable/-/any-observable-0.3.0.tgz#af933475e5806a67d0d7df090dd5e8bef65d119b" - integrity sha512-/FQM1EDkTsf63Ub2C6O7GuYFDsSXUwsaZDurV0np41ocwq0jthUAYCmhBX9f+KwlaCgIuWyr/4WlUQUBfKfZog== - any-promise@^1.0.0: version "1.3.0" resolved "https://registry.yarnpkg.com/any-promise/-/any-promise-1.3.0.tgz#abc6afeedcea52e809cdc0376aed3ce39635d17f" @@ -5635,6 +5765,15 @@ asn1@~0.2.3: dependencies: safer-buffer "~2.1.0" +asn1js@^3.0.1, asn1js@^3.0.5: + version "3.0.5" + resolved "https://registry.yarnpkg.com/asn1js/-/asn1js-3.0.5.tgz#5ea36820443dbefb51cc7f88a2ebb5b462114f38" + integrity sha512-FVnvrKJwpt9LP2lAMl8qZswRNm3T4q9CON+bxldk2iwk3FFpuwhx2FfinyitizWHsVYyaY+y5JzDR0rCMV5yTQ== + dependencies: + pvtsutils "^1.3.2" + pvutils "^1.1.3" + tslib "^2.4.0" + assert-plus@1.0.0, assert-plus@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/assert-plus/-/assert-plus-1.0.0.tgz#f12e0f3c5d77b0b1cdd9146942e4e96c1e4dd525" @@ -5650,10 +5789,10 @@ ast-types-flow@^0.0.7: resolved "https://registry.yarnpkg.com/ast-types-flow/-/ast-types-flow-0.0.7.tgz#f70b735c6bca1a5c9c22d982c3e39e7feba3bdad" integrity sha1-9wtzXGvKGlycItmCw+Oef+ujva0= -async-limiter@~1.0.0: - version "1.0.1" - resolved 
"https://registry.yarnpkg.com/async-limiter/-/async-limiter-1.0.1.tgz#dd379e94f0db8310b08291f9d64c3209766617fd" - integrity sha512-csOlWGAcRFJaI6m+F2WKdnMKr4HhdhFVBk0H/QbJFMCr+uO2kwohwXQPxw/9OCxp05r5ghVBFSyioixx3gfkNQ== +astral-regex@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/astral-regex/-/astral-regex-2.0.0.tgz#483143c567aeed4785759c0865786dc77d7d2e31" + integrity sha512-Z7tMw1ytTXt5jqMcOP+OQteU1VuNK9Y02uuJtKQ1Sv69jXQKKg5cibLwGJow8yzZP+eAc18EmLGPal0bp36rvQ== async-validator@^4.1.0: version "4.2.5" @@ -5942,11 +6081,6 @@ babel-preset-react-app@^10.0.1: babel-plugin-macros "^3.1.0" babel-plugin-transform-react-remove-prop-types "^0.4.24" -backo2@^1.0.2: - version "1.0.2" - resolved "https://registry.yarnpkg.com/backo2/-/backo2-1.0.2.tgz#31ab1ac8b129363463e35b3ebb69f4dfcfba7947" - integrity sha1-MasayLEpNjRj41s+u2n038+6eUc= - bail@^1.0.0: version "1.0.5" resolved "https://registry.yarnpkg.com/bail/-/bail-1.0.5.tgz#b6fa133404a392cbc1f8c4bf63f5953351e7a776" @@ -6012,6 +6146,15 @@ binary-extensions@^2.0.0: resolved "https://registry.yarnpkg.com/binary-extensions/-/binary-extensions-2.2.0.tgz#75f502eeaf9ffde42fc98829645be4ea76bd9e2d" integrity sha512-jDctJ/IVQbZoJykoeHbhXpOlNBqGNcwXJKJog42E5HDPUwQTSdjCHdihjj0DlnheQ7blbT6dHOafNAiS8ooQKA== +bl@^4.1.0: + version "4.1.0" + resolved "https://registry.yarnpkg.com/bl/-/bl-4.1.0.tgz#451535264182bec2fbbc83a62ab98cf11d9f7b3a" + integrity sha512-1W07cM9gS6DcLperZfFSj+bWLtaPGSOHWhPiGzXmvVJbRLdG82sH/Kn8EtW1VqWVA54AKf2h5k5BbnIbwF3h6w== + dependencies: + buffer "^5.5.0" + inherits "^2.0.4" + readable-stream "^3.4.0" + blacklist@^1.1.4: version "1.1.4" resolved "https://registry.yarnpkg.com/blacklist/-/blacklist-1.1.4.tgz#b2dd09d6177625b2caa69835a37b28995fa9a2f2" @@ -6101,6 +6244,16 @@ browserslist@^4.0.0, browserslist@^4.14.5, browserslist@^4.18.1, browserslist@^4 node-releases "^2.0.12" update-browserslist-db "^1.0.11" +browserslist@^4.21.9: + version "4.22.2" + resolved "https://registry.yarnpkg.com/browserslist/-/browserslist-4.22.2.tgz#704c4943072bd81ea18997f3bd2180e89c77874b" + integrity sha512-0UgcrvQmBDvZHFGdYUehrCNIazki7/lUP3kkoi/r3YB2amZbFM9J43ZRkJTXBUZK4gmx56+Sqk9+Vs9mwZx9+A== + dependencies: + caniuse-lite "^1.0.30001565" + electron-to-chromium "^1.4.601" + node-releases "^2.0.14" + update-browserslist-db "^1.0.13" + bser@2.1.1: version "2.1.1" resolved "https://registry.yarnpkg.com/bser/-/bser-2.1.1.tgz#e6787da20ece9d07998533cfd9de6f5c38f4bc05" @@ -6113,17 +6266,12 @@ btoa@^1.2.1: resolved "https://registry.yarnpkg.com/btoa/-/btoa-1.2.1.tgz#01a9909f8b2c93f6bf680ba26131eb30f7fa3d73" integrity sha512-SB4/MIGlsiVkMcHmT+pSmIPoNDoHg+7cMzmt3Uxt628MTz2487DKSqK/fuhFBrkuqrYv5UCEnACpF4dTFNKc/g== -buffer-equal-constant-time@1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/buffer-equal-constant-time/-/buffer-equal-constant-time-1.0.1.tgz#f8e71132f7ffe6e01a5c9697a4c6f3e48d5cc819" - integrity sha1-+OcRMvf/5uAaXJaXpMbz5I1cyBk= - buffer-from@^1.0.0: version "1.1.2" resolved "https://registry.yarnpkg.com/buffer-from/-/buffer-from-1.1.2.tgz#2b146a6fd72e80b4f55d255f35ed59a3a9a41bd5" integrity sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ== -buffer@^5.7.0: +buffer@^5.5.0: version "5.7.1" resolved "https://registry.yarnpkg.com/buffer/-/buffer-5.7.1.tgz#ba62e7c13133053582197160851a8f648e99eed0" integrity sha512-EHcyIPBQ4BSGlvjB16k5KgAJ27CIsHY/2JBmCRReo48y9rQ3MaUzWX3KVlBa4U7MyX02HdVj0K7C3WaB3ju7FQ== @@ -6136,6 +6284,13 @@ builtin-modules@^3.1.0: resolved 
"https://registry.yarnpkg.com/builtin-modules/-/builtin-modules-3.2.0.tgz#45d5db99e7ee5e6bc4f362e008bf917ab5049887" integrity sha512-lGzLKcioL90C7wMczpkY0n/oART3MbBa8R9OFGE1rJxoVI86u4WAGfEk8Wjv10eKSyTHVGkSo3bvBylCEtk7LA== +busboy@^1.6.0: + version "1.6.0" + resolved "https://registry.yarnpkg.com/busboy/-/busboy-1.6.0.tgz#966ea36a9502e43cdb9146962523b92f531f6893" + integrity sha512-8SFQbg/0hQ9xy3UNTB0YEnsNBbWfhf7RtnzpL7TkBiTBRfrQ9Fxcnz7VJsleJpyp6rVLvXiuORqjlHi5q+PYuA== + dependencies: + streamsearch "^1.1.0" + bytes@3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/bytes/-/bytes-3.0.0.tgz#d32815404d689699f85a4ea4fa8755dd13a96048" @@ -6161,19 +6316,6 @@ cache-base@^1.0.1: union-value "^1.0.0" unset-value "^1.0.0" -cacheable-request@^6.0.0: - version "6.1.0" - resolved "https://registry.yarnpkg.com/cacheable-request/-/cacheable-request-6.1.0.tgz#20ffb8bd162ba4be11e9567d823db651052ca912" - integrity sha512-Oj3cAGPCqOZX7Rz64Uny2GYAZNliQSqfbePrgAQ1wKAihYmCUnraBtJtKcGR4xz7wF+LoJC+ssFZvv5BgF9Igg== - dependencies: - clone-response "^1.0.2" - get-stream "^5.1.0" - http-cache-semantics "^4.0.0" - keyv "^3.0.0" - lowercase-keys "^2.0.0" - normalize-url "^4.1.0" - responselike "^1.0.2" - call-bind@^1.0.0, call-bind@^1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/call-bind/-/call-bind-1.0.2.tgz#b1d4e89e688119c3c9a903ad30abb2f6a919be3c" @@ -6187,14 +6329,6 @@ callsites@^3.0.0: resolved "https://registry.yarnpkg.com/callsites/-/callsites-3.1.0.tgz#b3630abd8943432f54b3f0519238e33cd7df2f73" integrity sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ== -camel-case@4.1.1: - version "4.1.1" - resolved "https://registry.yarnpkg.com/camel-case/-/camel-case-4.1.1.tgz#1fc41c854f00e2f7d0139dfeba1542d6896fe547" - integrity sha512-7fa2WcG4fYFkclIvEmxBbTvmibwF2/agfEBc6q3lOpVu0A13ltLsA+Hr/8Hp6kp5f+G7hKi6t8lys6XxP+1K6Q== - dependencies: - pascal-case "^3.1.1" - tslib "^1.10.0" - camel-case@4.1.2, camel-case@^4.1.1, camel-case@^4.1.2: version "4.1.2" resolved "https://registry.yarnpkg.com/camel-case/-/camel-case-4.1.2.tgz#9728072a954f805228225a6deea6b38461e1bd5a" @@ -6238,6 +6372,11 @@ caniuse-lite@^1.0.0, caniuse-lite@^1.0.30001464, caniuse-lite@^1.0.30001503: resolved "https://registry.yarnpkg.com/caniuse-lite/-/caniuse-lite-1.0.30001508.tgz#4461bbc895c692a96da399639cc1e146e7302a33" integrity sha512-sdQZOJdmt3GJs1UMNpCCCyeuS2IEGLXnHyAo9yIO5JJDjbjoVRij4M1qep6P6gFpptD1PqIYgzM+gwJbOi92mw== +caniuse-lite@^1.0.30001565: + version "1.0.30001566" + resolved "https://registry.yarnpkg.com/caniuse-lite/-/caniuse-lite-1.0.30001566.tgz#61a8e17caf3752e3e426d4239c549ebbb37fef0d" + integrity sha512-ggIhCsTxmITBAMmK8yZjEhCO5/47jKXPu6Dha/wuCS4JePVL+3uiDEBuhu2aIoT+bqTOR8L76Ip1ARL9xYsEJA== + capital-case@^1.0.4: version "1.0.4" resolved "https://registry.yarnpkg.com/capital-case/-/capital-case-1.0.4.tgz#9d130292353c9249f6b00fa5852bee38a717e669" @@ -6267,17 +6406,6 @@ ccount@^1.0.0: resolved "https://registry.yarnpkg.com/ccount/-/ccount-1.1.0.tgz#246687debb6014735131be8abab2d93898f8d043" integrity sha512-vlNK021QdI7PNeiUh/lKkC/mNHHfV0m/Ad5JoI0TYtlBnJAslM/JIkm/tGC88bkLIwO6OQ5uV6ztS6kVAtCDlg== -chalk@^1.0.0, chalk@^1.1.3: - version "1.1.3" - resolved "https://registry.yarnpkg.com/chalk/-/chalk-1.1.3.tgz#a8115c55e4a702fe4d150abd3872822a7e09fc98" - integrity sha1-qBFcVeSnAv5NFQq9OHKCKn4J/Jg= - dependencies: - ansi-styles "^2.2.1" - escape-string-regexp "^1.0.2" - has-ansi "^2.0.0" - strip-ansi "^3.0.0" - supports-color "^2.0.0" - chalk@^2.0.0, chalk@^2.4.1, chalk@^2.4.2: 
version "2.4.2" resolved "https://registry.yarnpkg.com/chalk/-/chalk-2.4.2.tgz#cd42541677a54333cf541a49108c1432b44c9424" @@ -6295,7 +6423,7 @@ chalk@^3.0.0: ansi-styles "^4.1.0" supports-color "^7.1.0" -chalk@^4.0.0, chalk@^4.0.2, chalk@^4.1.0, chalk@^4.1.2: +chalk@^4.0.0, chalk@^4.0.2, chalk@^4.1.0, chalk@^4.1.1, chalk@^4.1.2: version "4.1.2" resolved "https://registry.yarnpkg.com/chalk/-/chalk-4.1.2.tgz#aac4e2b7734a740867aeb16bf02aad556a1e7a01" integrity sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA== @@ -6398,7 +6526,7 @@ check-types@^11.1.1: resolved "https://registry.yarnpkg.com/check-types/-/check-types-11.1.2.tgz#86a7c12bf5539f6324eb0e70ca8896c0e38f3e2f" integrity sha512-tzWzvgePgLORb9/3a0YenggReLKAIb2owL03H2Xdoe5pKcUyWRSEQ8xfCar8t2SIAuEDwtmx2da1YB52YuHQMQ== -chokidar@^3.4.2, chokidar@^3.4.3, chokidar@^3.5.3: +chokidar@^3.4.2, chokidar@^3.5.3: version "3.5.3" resolved "https://registry.yarnpkg.com/chokidar/-/chokidar-3.5.3.tgz#1cf37c8707b932bd1af1ae22c0432e2acd1903bd" integrity sha512-Dr3sfKRP6oTcjf2JmUmFJfeVMvXBdegxB0iVQ5eb2V10uFJUCAS8OByZdVAyVb8xXNz3GjjTgj9kLWsZTqE6kw== @@ -6455,12 +6583,10 @@ clean-css@^5.2.2: dependencies: source-map "~0.6.0" -cli-cursor@^2.0.0, cli-cursor@^2.1.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/cli-cursor/-/cli-cursor-2.1.0.tgz#b35dac376479facc3e94747d41d0d0f5238ffcb5" - integrity sha1-s12sN2R5+sw+lHR9QdDQ9SOP/LU= - dependencies: - restore-cursor "^2.0.0" +clean-stack@^2.0.0: + version "2.2.0" + resolved "https://registry.yarnpkg.com/clean-stack/-/clean-stack-2.2.0.tgz#ee8472dbb129e727b31e8a10a427dee9dfe4008b" + integrity sha512-4diC9HaTE+KRAMWhDhrGOECgWZxoevMc5TlkObMqNSsVU62PYzXZ/SMTjzyGAFF1YusgxGcSWTEXBhp0CPwQ1A== cli-cursor@^3.1.0: version "3.1.0" @@ -6469,13 +6595,18 @@ cli-cursor@^3.1.0: dependencies: restore-cursor "^3.1.0" -cli-truncate@^0.2.1: - version "0.2.1" - resolved "https://registry.yarnpkg.com/cli-truncate/-/cli-truncate-0.2.1.tgz#9f15cfbb0705005369216c626ac7d05ab90dd574" - integrity sha1-nxXPuwcFAFNpIWxiasfQWrkN1XQ= +cli-spinners@^2.5.0: + version "2.9.2" + resolved "https://registry.yarnpkg.com/cli-spinners/-/cli-spinners-2.9.2.tgz#1773a8f4b9c4d6ac31563df53b3fc1d79462fe41" + integrity sha512-ywqV+5MmyL4E7ybXgKys4DugZbX0FC6LnwrhjuykIjnK9k8OQacQ7axGKnjDXWNhns0xot3bZI5h55H8yo9cJg== + +cli-truncate@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/cli-truncate/-/cli-truncate-2.1.0.tgz#c39e28bf05edcde5be3b98992a22deed5a2b93c7" + integrity sha512-n8fOixwDD6b/ObinzTrp1ZKFzbgvKZvuz/TvejnLn1aQfC6r52XEx85FmuC+3HI+JM7coBRXUvNqEU2PHVrHpg== dependencies: - slice-ansi "0.0.4" - string-width "^1.0.1" + slice-ansi "^3.0.0" + string-width "^4.2.0" cli-width@^3.0.0: version "3.0.0" @@ -6500,6 +6631,15 @@ cliui@^7.0.2: strip-ansi "^6.0.0" wrap-ansi "^7.0.0" +cliui@^8.0.1: + version "8.0.1" + resolved "https://registry.yarnpkg.com/cliui/-/cliui-8.0.1.tgz#0c04b075db02cbfe60dc8e6cf2f5486b1a3608aa" + integrity sha512-BSeNnyus75C4//NQ9gQt1/csTXyo/8Sb+afLAkzAptFuMsod9HFokGNudZpi/oQV73hnVK+sR+5PVRMd+Dr7YQ== + dependencies: + string-width "^4.2.0" + strip-ansi "^6.0.1" + wrap-ansi "^7.0.0" + clone-deep@^4.0.1: version "4.0.1" resolved "https://registry.yarnpkg.com/clone-deep/-/clone-deep-4.0.1.tgz#c19fd9bdbbf85942b4fd979c84dcf7d5f07c2387" @@ -6509,12 +6649,10 @@ clone-deep@^4.0.1: kind-of "^6.0.2" shallow-clone "^3.0.0" -clone-response@^1.0.2: - version "1.0.2" - resolved 
"https://registry.yarnpkg.com/clone-response/-/clone-response-1.0.2.tgz#d1dc973920314df67fbeb94223b4ee350239e96b" - integrity sha1-0dyXOSAxTfZ/vrlCI7TuNQI56Ws= - dependencies: - mimic-response "^1.0.0" +clone@^1.0.2: + version "1.0.4" + resolved "https://registry.yarnpkg.com/clone/-/clone-1.0.4.tgz#da309cc263df15994c688ca902179ca3c7cd7c7e" + integrity sha512-JQHZ2QMW6l3aH/j6xCqQThY/9OH4D/9ls34cgkUBiEeocRTU04tHfKPBsUK1PqZCUQM7GiA0IIXJSuXHI64Kbg== clsx@^1.2.1: version "1.2.1" @@ -6535,11 +6673,6 @@ coa@^2.0.2: chalk "^2.4.1" q "^1.1.2" -code-point-at@^1.0.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/code-point-at/-/code-point-at-1.1.0.tgz#0d070b4d043a5bea33a2f1a40e2edb3d9a4ccf77" - integrity sha1-DQcLTQQ6W+ozovGkDi7bPZpMz3c= - codemirror@^5.62.0: version "5.65.10" resolved "https://registry.yarnpkg.com/codemirror/-/codemirror-5.65.10.tgz#4276a93b8534ce91f14b733ba9a1ac949666eac9" @@ -6597,7 +6730,7 @@ colord@^2.9.1: resolved "https://registry.yarnpkg.com/colord/-/colord-2.9.3.tgz#4f8ce919de456f1d5c1c368c307fe20f3e59fb43" integrity sha512-jeC1axXpnb0/2nn/Y1LPuLdgXBLH7aDcHu4KEKfqw3CUhX7ZpfBSlPKyqXE6btIgEzfWtrX3/tyBCaCvXvMkOw== -colorette@^2.0.10: +colorette@^2.0.10, colorette@^2.0.16: version "2.0.20" resolved "https://registry.yarnpkg.com/colorette/-/colorette-2.0.20.tgz#9eb793e6833067f7235902fcd3b09917a000a95a" integrity sha512-IfEDxwoWIjkeXL1eXcDiow4UbKjhLdq6/EuSVR9GMN7KVH3r9gQ83e73hsz1Nd1T3ijd5xv1wcWRYO+D6kCI2w== @@ -6699,7 +6832,7 @@ connect-history-api-fallback@^2.0.0: resolved "https://registry.yarnpkg.com/connect-history-api-fallback/-/connect-history-api-fallback-2.0.0.tgz#647264845251a0daf25b97ce87834cace0f5f1c8" integrity sha512-U73+6lQFmfiNPrYbXqr6kZ1i1wiRqXnp2nhMsINseWXO8lDau0LGEffJ8kQi4EjLZympVgRdvqjAgiZ1tgzDDA== -constant-case@^3.0.3, constant-case@^3.0.4: +constant-case@^3.0.4: version "3.0.4" resolved "https://registry.yarnpkg.com/constant-case/-/constant-case-3.0.4.tgz#3b84a9aeaf4cf31ec45e6bf5de91bdfb0589faf1" integrity sha512-I2hSBi7Vvs7BEuJDr5dDHfzb/Ruj3FyvFyh7KLilAjNQw3Be+xgqUBA2W6scVEcL0hL1dwPRtIqEPVUCKkSsyQ== @@ -6725,6 +6858,11 @@ convert-source-map@^1.4.0, convert-source-map@^1.5.0, convert-source-map@^1.6.0, resolved "https://registry.yarnpkg.com/convert-source-map/-/convert-source-map-1.9.0.tgz#7faae62353fb4213366d0ca98358d22e8368b05f" integrity sha512-ASFBup0Mz1uyiIjANan1jzLQami9z1PoYSZCiiYW2FczPbenXc45FZdBZLzOT+r6+iciuEModtmCti+hjaAk0A== +convert-source-map@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/convert-source-map/-/convert-source-map-2.0.0.tgz#4b560f649fc4e918dd0ab75cf4961e8bc882d82a" + integrity sha512-Kvp459HrV2FEJ1CAsi1Ku+MY3kasH19TFykTz2xWmMeq6bk2NU3XXvfJ+Q61m0xktWwt+1HSYf3JZsTms3aRJg== + cookie-signature@1.0.6: version "1.0.6" resolved "https://registry.yarnpkg.com/cookie-signature/-/cookie-signature-1.0.6.tgz#e303a882b342cc3ee8ca513a79999734dab3ae2c" @@ -6788,13 +6926,6 @@ core-util-is@1.0.2, core-util-is@~1.0.0: resolved "https://registry.yarnpkg.com/core-util-is/-/core-util-is-1.0.2.tgz#b5fd54220aa2bc5ab57aab7140c940754503c1a7" integrity sha1-tf1UIgqivFq1eqtxQMlAdUUDwac= -cosmiconfig-toml-loader@1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/cosmiconfig-toml-loader/-/cosmiconfig-toml-loader-1.0.0.tgz#0681383651cceff918177debe9084c0d3769509b" - integrity sha512-H/2gurFWVi7xXvCyvsWRLCMekl4tITJcX0QEsDMpzxtuxDyM59xLatYNg4s/k9AA/HdtCYfj2su8mgA0GSDLDA== - dependencies: - "@iarna/toml" "^2.2.5" - cosmiconfig-typescript-loader@^1.0.0: version "1.0.9" resolved 
"https://registry.yarnpkg.com/cosmiconfig-typescript-loader/-/cosmiconfig-typescript-loader-1.0.9.tgz#69c523f7e8c3d9f27f563d02bbeadaf2f27212d3" @@ -6803,17 +6934,6 @@ cosmiconfig-typescript-loader@^1.0.0: cosmiconfig "^7" ts-node "^10.7.0" -cosmiconfig@7.0.0: - version "7.0.0" - resolved "https://registry.yarnpkg.com/cosmiconfig/-/cosmiconfig-7.0.0.tgz#ef9b44d773959cae63ddecd122de23853b60f8d3" - integrity sha512-pondGvTuVYDk++upghXJabWzL6Kxu6f26ljFw64Swq9v6sQPUL3EUlVDV56diOjpCayKihL6hVe8exIACU4XcA== - dependencies: - "@types/parse-json" "^4.0.0" - import-fresh "^3.2.1" - parse-json "^5.0.0" - path-type "^4.0.0" - yaml "^1.10.0" - cosmiconfig@^6.0.0: version "6.0.0" resolved "https://registry.yarnpkg.com/cosmiconfig/-/cosmiconfig-6.0.0.tgz#da4fee853c52f6b1e6935f41c1a2fc50bd4a9982" @@ -6836,6 +6956,16 @@ cosmiconfig@^7, cosmiconfig@^7.0.0, cosmiconfig@^7.0.1: path-type "^4.0.0" yaml "^1.10.0" +cosmiconfig@^8.1.0, cosmiconfig@^8.1.3: + version "8.3.6" + resolved "https://registry.yarnpkg.com/cosmiconfig/-/cosmiconfig-8.3.6.tgz#060a2b871d66dba6c8538ea1118ba1ac16f5fae3" + integrity sha512-kcZ6+W5QzcJ3P1Mt+83OUv/oHFqZHIx8DuxG6eZ5RGMERoLqp4BuGjhHLYGK+Kf5XVkQvqBSmAy/nGWN3qDgEA== + dependencies: + import-fresh "^3.3.0" + js-yaml "^4.1.0" + parse-json "^5.2.0" + path-type "^4.0.0" + craco-antd@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/craco-antd/-/craco-antd-2.0.0.tgz#f38977f4de1714e984ad4f68aae2bcce81bdab79" @@ -6883,21 +7013,7 @@ cronstrue@^1.122.0: resolved "https://registry.yarnpkg.com/cronstrue/-/cronstrue-1.122.0.tgz#bd6838077b476d28f61d381398b47b8c3912a126" integrity sha512-PFuhZd+iPQQ0AWTXIEYX+t3nFGzBrWxmTWUKJOrsGRewaBSLKZ4I1f8s2kryU75nNxgyugZgiGh2OJsCTA/XlA== -cross-fetch@3.0.6: - version "3.0.6" - resolved "https://registry.yarnpkg.com/cross-fetch/-/cross-fetch-3.0.6.tgz#3a4040bc8941e653e0e9cf17f29ebcd177d3365c" - integrity sha512-KBPUbqgFjzWlVcURG+Svp9TlhA5uliYtiNx/0r8nv0pdypeQCRJ9IaSIc3q/x3q8t3F75cHuwxVql1HFGHCNJQ== - dependencies: - node-fetch "2.6.1" - -cross-fetch@3.1.4: - version "3.1.4" - resolved "https://registry.yarnpkg.com/cross-fetch/-/cross-fetch-3.1.4.tgz#9723f3a3a247bf8b89039f3a380a9244e8fa2f39" - integrity sha512-1eAtFWdIubi6T4XPy6ei9iUFoKpUkIF971QLN8lIvvvwueI65+Nw5haMNKUwfJxabqlIIDODJKGrQ66gxC0PbQ== - dependencies: - node-fetch "2.6.1" - -cross-fetch@^3.0.6, cross-fetch@^3.1.5: +cross-fetch@^3.1.5: version "3.1.5" resolved "https://registry.yarnpkg.com/cross-fetch/-/cross-fetch-3.1.5.tgz#e1389f44d9e7ba767907f7af8454787952ab534f" integrity sha512-lvb1SBsI0Z7GDwmuid+mU3kWVBwTVUbe7S0H52yaaAdQOXq2YktTCZdlAcNKFzE6QtRz0snpw9bNiPeOIkkQvw== @@ -7271,21 +7387,16 @@ data-urls@^2.0.0: whatwg-mimetype "^2.3.0" whatwg-url "^8.0.0" -dataloader@2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/dataloader/-/dataloader-2.0.0.tgz#41eaf123db115987e21ca93c005cd7753c55fe6f" - integrity sha512-YzhyDAwA4TaQIhM5go+vCLmU0UikghC/t9DTQYZR2M/UvZ1MdOhPezSDZcjj9uqQJOMqjLcpWtyW2iNINdlatQ== +dataloader@^2.2.2: + version "2.2.2" + resolved "https://registry.yarnpkg.com/dataloader/-/dataloader-2.2.2.tgz#216dc509b5abe39d43a9b9d97e6e5e473dfbe3e0" + integrity sha512-8YnDaaf7N3k/q5HnTJVuzSyLETjoZjVmHc4AeKAzOvKHEFQKcn64OKBfzHYtE9zGjctNM7V9I0MfnUVLpi7M5g== date-fns@2.x: version "2.29.3" resolved "https://registry.yarnpkg.com/date-fns/-/date-fns-2.29.3.tgz#27402d2fc67eb442b511b70bbdf98e6411cd68a8" integrity sha512-dDCnyH2WnnKusqvZZ6+jA1O51Ibt8ZMRNkDZdyAyK4YfbDwa/cEmuztzG5pk6hqlp9aSBPYcjOlktquahGwGeA== -date-fns@^1.27.2: - version "1.30.1" - resolved 
"https://registry.yarnpkg.com/date-fns/-/date-fns-1.30.1.tgz#2e71bf0b119153dbb4cc4e88d9ea5acfb50dc05c" - integrity sha512-hBSVCvSmWC+QypYObzwGOd9wqdDpOt+0wl0KbU+R+uuZBS1jN8VsD1ss3irQDknRj5NvxiTF6oj/nDRnN/UQNw== - dayjs@1.x, dayjs@^1.11.7: version "1.11.7" resolved "https://registry.yarnpkg.com/dayjs/-/dayjs-1.11.7.tgz#4b296922642f70999544d1144a2c25730fce63e2" @@ -7339,23 +7450,11 @@ decode-uri-component@^0.2.0: resolved "https://registry.yarnpkg.com/decode-uri-component/-/decode-uri-component-0.2.2.tgz#e69dbe25d37941171dd540e024c444cd5188e1e9" integrity sha512-FqUYQ+8o158GyGTrMFJms9qh3CqTKvAqgqsTnkLI8sKu0028orqBhxNMFkFen0zGyg6epACD32pjVk58ngIErQ== -decompress-response@^3.3.0: - version "3.3.0" - resolved "https://registry.yarnpkg.com/decompress-response/-/decompress-response-3.3.0.tgz#80a4dd323748384bfa248083622aedec982adff3" - integrity sha1-gKTdMjdIOEv6JICDYirt7Jgq3/M= - dependencies: - mimic-response "^1.0.0" - dedent@^0.7.0: version "0.7.0" resolved "https://registry.yarnpkg.com/dedent/-/dedent-0.7.0.tgz#2495ddbaf6eb874abb0e1be9df22d2e5a544326c" integrity sha1-JJXduvbrh0q7Dhvp3yLS5aVEMmw= -deep-extend@^0.6.0: - version "0.6.0" - resolved "https://registry.yarnpkg.com/deep-extend/-/deep-extend-0.6.0.tgz#c4fa7c95404a17a9c3e8ca7e1537312b736330ac" - integrity sha512-LOHxIOaPYdHlJRtCQfDIVZtfw/ufM8+rVj649RIHzcm/vGwQRXFt6OPqIFWsm2XEMrNIEtWR64sY1LEKD2vAOA== - deep-is@^0.1.3, deep-is@~0.1.3: version "0.1.3" resolved "https://registry.yarnpkg.com/deep-is/-/deep-is-0.1.3.tgz#b369d6fb5dbc13eecf524f91b070feedc357cf34" @@ -7373,10 +7472,12 @@ default-gateway@^6.0.3: dependencies: execa "^5.0.0" -defer-to-connect@^1.0.1: - version "1.1.3" - resolved "https://registry.yarnpkg.com/defer-to-connect/-/defer-to-connect-1.1.3.tgz#331ae050c08dcf789f8c83a7b81f0ed94f4ac591" - integrity sha512-0ISdNousHvZT2EiFlZeZAHBUvSxmKswVCEf8hW7KWgG4a8MVEu/3Vb6uWYozkjylyCxe0JBIiRB1jV45S70WVQ== +defaults@^1.0.3: + version "1.0.4" + resolved "https://registry.yarnpkg.com/defaults/-/defaults-1.0.4.tgz#b0b02062c1e2aa62ff5d9528f0f98baa90978d7a" + integrity sha512-eFuaLoy/Rxalv2kr+lqMlUnrDWV+3j4pljOIJgLIhI058IQfWJ7vXhyEIHu+HtC738klGALYxOKDO0bQP3tg8A== + dependencies: + clone "^1.0.2" define-lazy-prop@^2.0.0: version "2.0.0" @@ -7433,11 +7534,6 @@ dependency-graph@^0.11.0: resolved "https://registry.yarnpkg.com/dependency-graph/-/dependency-graph-0.11.0.tgz#ac0ce7ed68a54da22165a85e97a01d53f5eb2e27" integrity sha512-JeMq7fEshyepOWDfcfHK06N3MhyPhz++vtqWhMT5O9A3K42rdsEDpfdVqjaqaAhsw6a+ZqeDvQVtD0hFHQWrzg== -dependency-graph@^0.9.0: - version "0.9.0" - resolved "https://registry.yarnpkg.com/dependency-graph/-/dependency-graph-0.9.0.tgz#11aed7e203bc8b00f48356d92db27b265c445318" - integrity sha512-9YLIBURXj4DJMFALxXw9K3Y3rwb5Fk0X5/8ipCzaN84+gKxoHK43tVKRNakCQbiEx07E8Uwhuq21BpUagFhZ8w== - dequal@^2.0.3: version "2.0.3" resolved "https://registry.yarnpkg.com/dequal/-/dequal-2.0.3.tgz#2644214f1997d39ed0ee0ece72335490a7ac67be" @@ -7660,6 +7756,11 @@ dotenv@^10.0.0: resolved "https://registry.yarnpkg.com/dotenv/-/dotenv-10.0.0.tgz#3d4227b8fb95f81096cdd2b66653fb2c7085ba81" integrity sha512-rlBi9d8jpv9Sf1klPjNfFAuWDjKLwTIJJ/VxtoTwIR6hnZxcEOQCZg2oIL3MWBYw5GpUDKOEnND7LXTbIpQ03Q== +dotenv@^16.0.0: + version "16.3.1" + resolved "https://registry.yarnpkg.com/dotenv/-/dotenv-16.3.1.tgz#369034de7d7e5b120972693352a3bf112172cc3e" + integrity sha512-IPzF4w4/Rd94bA9imS68tZBaYyBWSCE47V1RGuMrB94iyTOIEwRmVL2x/4An+6mETpLrKJ5hQkB8W4kFAadeIQ== + dotenv@^8.2.0: version "8.6.0" resolved 
"https://registry.yarnpkg.com/dotenv/-/dotenv-8.6.0.tgz#061af664d19f7f4d8fc6e4ff9b584ce237adcb8b" @@ -7670,11 +7771,6 @@ dset@^3.1.2: resolved "https://registry.yarnpkg.com/dset/-/dset-3.1.3.tgz#c194147f159841148e8e34ca41f638556d9542d2" integrity sha512-20TuZZHCEZ2O71q9/+8BwKwZ0QtD9D8ObhrihJPr+vLLYlSuAU3/zL4cSlgbfeoGHTjCSJBa7NGcrF9/Bx/WJQ== -duplexer3@^0.1.4: - version "0.1.4" - resolved "https://registry.yarnpkg.com/duplexer3/-/duplexer3-0.1.4.tgz#ee01dd1cac0ed3cbc7fdbea37dc0a8f1ce002ce2" - integrity sha1-7gHdHKwO08vH/b6jfcCo8c4ALOI= - duplexer@^0.1.2, duplexer@~0.1.1: version "0.1.2" resolved "https://registry.yarnpkg.com/duplexer/-/duplexer-0.1.2.tgz#3abe43aef3835f8ae077d136ddce0f276b0400e6" @@ -7688,13 +7784,6 @@ ecc-jsbn@~0.1.1: jsbn "~0.1.0" safer-buffer "^2.1.0" -ecdsa-sig-formatter@1.0.11: - version "1.0.11" - resolved "https://registry.yarnpkg.com/ecdsa-sig-formatter/-/ecdsa-sig-formatter-1.0.11.tgz#ae0f0fa2d85045ef14a817daa3ce9acd0489e5bf" - integrity sha512-nagl3RYrbNv6kQkeJIpt6NJZy8twLB/2vtz6yN9Z4vRKHN4/QZJIEbqohALSgwKdnksuY3k5Addp5lg8sVoVcQ== - dependencies: - safe-buffer "^5.0.1" - ee-first@1.1.1: version "1.1.1" resolved "https://registry.yarnpkg.com/ee-first/-/ee-first-1.1.1.tgz#590c61156b0ae2f4f0255732a158b266bc56b21d" @@ -7712,10 +7801,10 @@ electron-to-chromium@^1.4.431: resolved "https://registry.yarnpkg.com/electron-to-chromium/-/electron-to-chromium-1.4.441.tgz#94dd9c1cbf081d83f032a4f1cd9f787e21fc24ce" integrity sha512-LlCgQ8zgYZPymf5H4aE9itwiIWH4YlCiv1HFLmmcBeFYi5E+3eaIFnjHzYtcFQbaKfAW+CqZ9pgxo33DZuoqPg== -elegant-spinner@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/elegant-spinner/-/elegant-spinner-1.0.1.tgz#db043521c95d7e303fd8f345bedc3349cfb0729e" - integrity sha1-2wQ1IcldfjA/2PNFvtwzSc+wcp4= +electron-to-chromium@^1.4.601: + version "1.4.601" + resolved "https://registry.yarnpkg.com/electron-to-chromium/-/electron-to-chromium-1.4.601.tgz#cac69868548aee89961ffe63ff5a7716f0685b75" + integrity sha512-SpwUMDWe9tQu8JX5QCO1+p/hChAi9AE9UpoC3rcHVc+gdCGlbT3SGb5I1klgb952HRIyvt9wZhSz9bNBYz9swA== emittery@^0.10.2: version "0.10.2" @@ -7772,13 +7861,6 @@ encodeurl@~1.0.2: resolved "https://registry.yarnpkg.com/encodeurl/-/encodeurl-1.0.2.tgz#ad3ff4c86ec2d029322f5a02c3a9a606c95b3f59" integrity sha512-TPJXq8JqFaVYm2CWmPvnP2Iyo4ZSM7/QKcSmuMLDObfpH5fi7RUGmd/rTDf+rut/saiDiQEeVTNgAmJEdAOx0w== -end-of-stream@^1.1.0: - version "1.4.4" - resolved "https://registry.yarnpkg.com/end-of-stream/-/end-of-stream-1.4.4.tgz#5ae64a5f45057baf3626ec14da0ca5e4b2431eb0" - integrity sha512-+uw1inIHVPQoaVuHzRyXd21icM+cnt4CzD5rW+NC1wjOUSTOs+Te7FOv7AhN7vS9x/oIyhLP5PR1H+phQAHu5Q== - dependencies: - once "^1.4.0" - enhanced-resolve@^5.15.0: version "5.15.0" resolved "https://registry.yarnpkg.com/enhanced-resolve/-/enhanced-resolve-5.15.0.tgz#1af946c7d93603eb88e9896cee4904dc012e9c35" @@ -7874,7 +7956,7 @@ escape-html@^1.0.3, escape-html@~1.0.3: resolved "https://registry.yarnpkg.com/escape-html/-/escape-html-1.0.3.tgz#0258eae4d3d0c0974de1c169188ef0051d1d1988" integrity sha512-NiSupZ4OeuGwr68lGIeym/ksIZMJodUGOSCZ/FSnTxcrekbvqrgdUxlJOMpijaKZVjAJrWrGs/6Jy8OMuyj9ow== -escape-string-regexp@^1.0.2, escape-string-regexp@^1.0.5: +escape-string-regexp@^1.0.5: version "1.0.5" resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz#1b61c0562190a8dff6ae3bb2cf0200ca130b86d4" integrity sha1-G2HAViGQqN/2rjuyzwIAyhMLhtQ= @@ -8207,16 +8289,6 @@ event-stream@=3.3.4: stream-combiner "~0.0.4" through "~2.3.1" -event-target-shim@^5.0.0: - version "5.0.1" - resolved 
"https://registry.yarnpkg.com/event-target-shim/-/event-target-shim-5.0.1.tgz#5d4d3ebdf9583d63a5333ce2deb7480ab2b05789" - integrity sha512-i/2XbnSz/uxRCU6+NdVJgKWDTM427+MqYbkQzD321DuCQJUqOuJKIA0IM2+W2xtYHdKOmZ4dR6fExsd4SXL+WQ== - -eventemitter3@^3.1.0: - version "3.1.2" - resolved "https://registry.yarnpkg.com/eventemitter3/-/eventemitter3-3.1.2.tgz#2d3d48f9c346698fce83a85d7d664e98535df6e7" - integrity sha512-tvtQIeLVHjDkJYnzf2dgVMxfuSGJeM/7UCG17TT4EumTfNtF+0nebF/4zWOIkCreAbtNqhGEboB6BWrwqNaw4Q== - eventemitter3@^4.0.0: version "4.0.7" resolved "https://registry.yarnpkg.com/eventemitter3/-/eventemitter3-4.0.7.tgz#2de9b68f6528d5644ef5c59526a1b4a07306169f" @@ -8355,10 +8427,10 @@ extract-domain@2.2.1: resolved "https://registry.yarnpkg.com/extract-domain/-/extract-domain-2.2.1.tgz#1deeae633a5cbf05ae2fd7b3ff87cb98cbc4cb5b" integrity sha512-lOq1adCJha0tFFBci4quxC4XLa6+Rs2WgAwTo9qbO9OsElvJmGgCvOzmHo/yg5CiqeP4+sHjkXYGkrCcIEprMg== -extract-files@9.0.0, extract-files@^9.0.0: - version "9.0.0" - resolved "https://registry.yarnpkg.com/extract-files/-/extract-files-9.0.0.tgz#8a7744f2437f81f5ed3250ed9f1550de902fe54a" - integrity sha512-CvdFfHkC95B4bBBk36hcEmvdR2awOdhhVUYH6S/zrVj3477zven/fJMYg7121h4T1xHZC+tetUpubpAhxwI7hQ== +extract-files@^11.0.0: + version "11.0.0" + resolved "https://registry.yarnpkg.com/extract-files/-/extract-files-11.0.0.tgz#b72d428712f787eef1f5193aff8ab5351ca8469a" + integrity sha512-FuoE1qtbJ4bBVvv94CC7s0oTnKUGvQs+Rjf1L2SJFfS+HTVVjhPFtehPdQ0JiGPqVNfSSZvL5yzHHQq2Z4WNhQ== extsprintf@1.3.0: version "1.3.0" @@ -8380,15 +8452,20 @@ faker@5.5.3: resolved "https://registry.npmjs.org/faker/-/faker-5.5.3.tgz#c57974ee484431b25205c2c8dc09fda861e51e0e" integrity sha512-wLTv2a28wjUyWkbnX7u/ABZBkUkIF2fCd73V6P2oFqEGEktDfzWx4UxrSqtPRw0xPRAcjeAOIiJWqZm3pP4u3g== +fast-decode-uri-component@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/fast-decode-uri-component/-/fast-decode-uri-component-1.0.1.tgz#46f8b6c22b30ff7a81357d4f59abfae938202543" + integrity sha512-WKgKWg5eUxvRZGwW8FvfbaH7AXSh2cL+3j5fMGzUMCxWBJ3dV3a7Wz8y2f/uQ0e3B6WmodD3oS54jTQ9HVTIIg== + fast-deep-equal@^3.1.1, fast-deep-equal@^3.1.3: version "3.1.3" resolved "https://registry.yarnpkg.com/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz#3a7d56b559d6cbc3eb512325244e619a65c6c525" integrity sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q== -fast-glob@^3.1.1, fast-glob@^3.2.12, fast-glob@^3.2.9: - version "3.2.12" - resolved "https://registry.yarnpkg.com/fast-glob/-/fast-glob-3.2.12.tgz#7f39ec99c2e6ab030337142da9e0c18f37afae80" - integrity sha512-DVj4CQIYYow0BlaelwK1pHl5n5cRSJfM60UA0zK891sVInoPri2Ekj7+e1CT3/3qxXenpI+nBBmQAcJPJgaj4w== +fast-glob@^3.2.11: + version "3.3.0" + resolved "https://registry.yarnpkg.com/fast-glob/-/fast-glob-3.3.0.tgz#7c40cb491e1e2ed5664749e87bfb516dbe8727c0" + integrity sha512-ChDuvbOypPuNjO8yIDf36x7BlZX1smcUMTTcyoIjycexOxd6DFsKsg21qVBzEmr3G7fUKIRy2/psii+CIUt7FA== dependencies: "@nodelib/fs.stat" "^2.0.2" "@nodelib/fs.walk" "^1.2.3" @@ -8396,10 +8473,10 @@ fast-glob@^3.1.1, fast-glob@^3.2.12, fast-glob@^3.2.9: merge2 "^1.3.0" micromatch "^4.0.4" -fast-glob@^3.2.11: - version "3.3.0" - resolved "https://registry.yarnpkg.com/fast-glob/-/fast-glob-3.3.0.tgz#7c40cb491e1e2ed5664749e87bfb516dbe8727c0" - integrity sha512-ChDuvbOypPuNjO8yIDf36x7BlZX1smcUMTTcyoIjycexOxd6DFsKsg21qVBzEmr3G7fUKIRy2/psii+CIUt7FA== +fast-glob@^3.2.12, fast-glob@^3.2.9: + version "3.2.12" + resolved 
"https://registry.yarnpkg.com/fast-glob/-/fast-glob-3.2.12.tgz#7f39ec99c2e6ab030337142da9e0c18f37afae80" + integrity sha512-DVj4CQIYYow0BlaelwK1pHl5n5cRSJfM60UA0zK891sVInoPri2Ekj7+e1CT3/3qxXenpI+nBBmQAcJPJgaj4w== dependencies: "@nodelib/fs.stat" "^2.0.2" "@nodelib/fs.walk" "^1.2.3" @@ -8422,11 +8499,25 @@ fast-loops@^1.1.3: resolved "https://registry.yarnpkg.com/fast-loops/-/fast-loops-1.1.3.tgz#ce96adb86d07e7bf9b4822ab9c6fac9964981f75" integrity sha512-8EZzEP0eKkEEVX+drtd9mtuQ+/QrlfW/5MlwcwK5Nds6EkZ/tRzEexkzUY2mIssnAyVLT+TKHuRXmFNNXYUd6g== +fast-querystring@^1.1.1: + version "1.1.2" + resolved "https://registry.yarnpkg.com/fast-querystring/-/fast-querystring-1.1.2.tgz#a6d24937b4fc6f791b4ee31dcb6f53aeafb89f53" + integrity sha512-g6KuKWmFXc0fID8WWH0jit4g0AGBoJhCkJMb1RmbsSEUNvQ+ZC8D6CUZ+GtF8nMzSPXnhiePyyqqipzNNEnHjg== + dependencies: + fast-decode-uri-component "^1.0.1" + fast-shallow-equal@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/fast-shallow-equal/-/fast-shallow-equal-1.0.0.tgz#d4dcaf6472440dcefa6f88b98e3251e27f25628b" integrity sha512-HPtaa38cPgWvaCFmRNhlc6NG7pv6NUHqjPgVAkWGoB9mQMwYB27/K0CvOM5Czy+qpT3e8XJ6Q4aPAnzpNpzNaw== +fast-url-parser@^1.1.3: + version "1.1.3" + resolved "https://registry.yarnpkg.com/fast-url-parser/-/fast-url-parser-1.1.3.tgz#f4af3ea9f34d8a271cf58ad2b3759f431f0b318d" + integrity sha512-5jOCVXADYNuRkKFzNJ0dCCewsZiYo0dz8QNYljkOpFC6r2U4OBmKtvm/Tsuh4w1YYdDqDb31a8TVhBJ2OJKdqQ== + dependencies: + punycode "^1.3.2" + fastest-stable-stringify@^2.0.2: version "2.0.2" resolved "https://registry.yarnpkg.com/fastest-stable-stringify/-/fastest-stable-stringify-2.0.2.tgz#3757a6774f6ec8de40c4e86ec28ea02417214c76" @@ -8478,21 +8569,6 @@ fbjs@^3.0.0: setimmediate "^1.0.5" ua-parser-js "^0.7.30" -figures@^1.7.0: - version "1.7.0" - resolved "https://registry.yarnpkg.com/figures/-/figures-1.7.0.tgz#cbe1e3affcf1cd44b80cadfed28dc793a9701d2e" - integrity sha1-y+Hjr/zxzUS4DK3+0o3Hk6lwHS4= - dependencies: - escape-string-regexp "^1.0.5" - object-assign "^4.1.0" - -figures@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/figures/-/figures-2.0.0.tgz#3ab1a2d2a62c8bfb431a0c94cb797a2fce27c962" - integrity sha1-OrGi0qYsi/tDGgyUy3l6L84nyWI= - dependencies: - escape-string-regexp "^1.0.5" - figures@^3.0.0: version "3.2.0" resolved "https://registry.yarnpkg.com/figures/-/figures-3.2.0.tgz#625c18bd293c604dc4a8ddb2febf0c88341746af" @@ -8680,19 +8756,19 @@ fork-ts-checker-webpack-plugin@^6.5.0: semver "^7.3.2" tapable "^1.0.0" -form-data@4.0.0, form-data@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/form-data/-/form-data-4.0.0.tgz#93919daeaf361ee529584b9b31664dc12c9fa452" - integrity sha512-ETEklSGi5t0QMZuiXoA/Q6vcnxcLQP5vdugSpuAyi6SVGi2clPPp+xgEhuMaHC+zGgn31Kd235W35f7Hykkaww== +form-data@^3.0.0: + version "3.0.1" + resolved "https://registry.yarnpkg.com/form-data/-/form-data-3.0.1.tgz#ebd53791b78356a99af9a300d4282c4d5eb9755f" + integrity sha512-RHkBKtLWUVwd7SqRIvCZMEvAMoGUp0XU+seQiZejj0COz3RI3hWP4sCv3gZWWLjJTd7rGwcsF5eKZGii0r/hbg== dependencies: asynckit "^0.4.0" combined-stream "^1.0.8" mime-types "^2.1.12" -form-data@^3.0.0: - version "3.0.1" - resolved "https://registry.yarnpkg.com/form-data/-/form-data-3.0.1.tgz#ebd53791b78356a99af9a300d4282c4d5eb9755f" - integrity sha512-RHkBKtLWUVwd7SqRIvCZMEvAMoGUp0XU+seQiZejj0COz3RI3hWP4sCv3gZWWLjJTd7rGwcsF5eKZGii0r/hbg== +form-data@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/form-data/-/form-data-4.0.0.tgz#93919daeaf361ee529584b9b31664dc12c9fa452" + integrity 
sha512-ETEklSGi5t0QMZuiXoA/Q6vcnxcLQP5vdugSpuAyi6SVGi2clPPp+xgEhuMaHC+zGgn31Kd235W35f7Hykkaww== dependencies: asynckit "^0.4.0" combined-stream "^1.0.8" @@ -8836,20 +8912,6 @@ get-package-type@^0.1.0: resolved "https://registry.yarnpkg.com/get-package-type/-/get-package-type-0.1.0.tgz#8de2d803cff44df3bc6c456e6668b36c3926e11a" integrity sha512-pjzuKtY64GYfWizNAJ0fr9VqttZkNiK2iS430LtIHzjBEr6bX8Am2zm4sW4Ro5wjWW5cAlRL1qAMTcXbjNAO2Q== -get-stream@^4.1.0: - version "4.1.0" - resolved "https://registry.yarnpkg.com/get-stream/-/get-stream-4.1.0.tgz#c1b255575f3dc21d59bfc79cd3d2b46b1c3a54b5" - integrity sha512-GMat4EJ5161kIy2HevLlr4luNjBgvmj413KaQA7jt4V8B4RDsfpHk7WQ9GVqfYyyx8OS/L66Kox+rJRNklLK7w== - dependencies: - pump "^3.0.0" - -get-stream@^5.1.0: - version "5.2.0" - resolved "https://registry.yarnpkg.com/get-stream/-/get-stream-5.2.0.tgz#4966a1795ee5ace65e706c4b7beb71257d6e22d3" - integrity sha512-nBF+F1rAZVCu/p7rjzgA+Yb4lfYXrpl7a6VmJrU8wF9I1CKvP/QwPNZHnOlwbTkY6dvtFIzFMSyQXbLoTQPRpA== - dependencies: - pump "^3.0.0" - get-stream@^6.0.0: version "6.0.1" resolved "https://registry.npmjs.org/get-stream/-/get-stream-6.0.1.tgz#a262d8eef67aced57c2852ad6167526a43cbf7b7" @@ -8946,19 +9008,7 @@ globals@^13.19.0: dependencies: type-fest "^0.20.2" -globby@11.0.3: - version "11.0.3" - resolved "https://registry.yarnpkg.com/globby/-/globby-11.0.3.tgz#9b1f0cb523e171dd1ad8c7b2a9fb4b644b9593cb" - integrity sha512-ffdmosjA807y7+lA1NM0jELARVmYul/715xiILEjo3hBLPTcirgQNnXECn5g3mtR8TOLCVbkfua1Hpen25/Xcg== - dependencies: - array-union "^2.1.0" - dir-glob "^3.0.1" - fast-glob "^3.1.1" - ignore "^5.1.4" - merge2 "^1.3.0" - slash "^3.0.0" - -globby@^11.0.4, globby@^11.1.0: +globby@^11.0.3, globby@^11.0.4, globby@^11.1.0: version "11.1.0" resolved "https://registry.yarnpkg.com/globby/-/globby-11.1.0.tgz#bd4be98bb042f83d796f7e3811991fbe82a0d34b" integrity sha512-jhIXaOzy1sb8IyocaruWSn1TjmnBVs8Ayhcy83rmxNJ8q2uWKCAj3CnJY+KpGSXCueAPc0i05kVvVKtP1t9S3g== @@ -8981,23 +9031,6 @@ globby@^13.1.1: merge2 "^1.4.1" slash "^4.0.0" -got@^9.6.0: - version "9.6.0" - resolved "https://registry.yarnpkg.com/got/-/got-9.6.0.tgz#edf45e7d67f99545705de1f7bbeeeb121765ed85" - integrity sha512-R7eWptXuGYxwijs0eV+v3o6+XH1IqVK8dJOEecQfTmkncw9AV4dcw/Dhxi8MdlqPthxxpZyizMzyg8RTmEsG+Q== - dependencies: - "@sindresorhus/is" "^0.14.0" - "@szmarczak/http-timer" "^1.1.2" - cacheable-request "^6.0.0" - decompress-response "^3.3.0" - duplexer3 "^0.1.4" - get-stream "^4.1.0" - lowercase-keys "^1.0.1" - mimic-response "^1.0.1" - p-cancelable "^1.0.0" - to-readable-stream "^1.0.0" - url-parse-lax "^3.0.0" - graceful-fs@^4.1.2, graceful-fs@^4.1.6, graceful-fs@^4.2.0, graceful-fs@^4.2.4, graceful-fs@^4.2.6, graceful-fs@^4.2.9: version "4.2.11" resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.11.tgz#4183e4e8bf08bb6e05bbb2f7d2e0c8f712ca40e3" @@ -9013,31 +9046,30 @@ graphemer@^1.4.0: resolved "https://registry.yarnpkg.com/graphemer/-/graphemer-1.4.0.tgz#fb2f1d55e0e3a1849aeffc90c4fa0dd53a0e66c6" integrity sha512-EtKwoO6kxCL9WO5xipiHTZlSzBm7WLT627TqC/uVRd0HKmq8NXyebnNYxDoBi7wt8eTWrUrKXCOVaFq9x1kgag== -graphql-config@^3.2.0: - version "3.3.0" - resolved "https://registry.yarnpkg.com/graphql-config/-/graphql-config-3.3.0.tgz#24c3672a427cb67c0c717ca3b9d70e9f0c9e752b" - integrity sha512-mSQIsPMssr7QrgqhnjI+CyVH6oQgCrgS6irHsTvwf7RFDRnR2k9kqpQOQgVoOytBSn0DOYryS0w0SAg9xor/Jw== - dependencies: - "@endemolshinegroup/cosmiconfig-typescript-loader" "3.0.2" - "@graphql-tools/graphql-file-loader" "^6.0.0" - "@graphql-tools/json-file-loader" "^6.0.0" - 
"@graphql-tools/load" "^6.0.0" - "@graphql-tools/merge" "^6.0.0" - "@graphql-tools/url-loader" "^6.0.0" - "@graphql-tools/utils" "^7.0.0" - cosmiconfig "7.0.0" - cosmiconfig-toml-loader "1.0.0" - minimatch "3.0.4" - string-env-interpolation "1.0.1" - -graphql-request@^3.3.0: - version "3.4.0" - resolved "https://registry.yarnpkg.com/graphql-request/-/graphql-request-3.4.0.tgz#3a400cd5511eb3c064b1873afb059196bbea9c2b" - integrity sha512-acrTzidSlwAj8wBNO7Q/UQHS8T+z5qRGquCQRv9J1InwR01BBWV9ObnoE+JS5nCCEj8wSGS0yrDXVDoRiKZuOg== +graphql-config@^5.0.2: + version "5.0.3" + resolved "https://registry.yarnpkg.com/graphql-config/-/graphql-config-5.0.3.tgz#d9aa2954cf47a927f9cb83cdc4e42ae55d0b321e" + integrity sha512-BNGZaoxIBkv9yy6Y7omvsaBUHOzfFcII3UN++tpH8MGOKFPFkCPZuwx09ggANMt8FgyWP1Od8SWPmrUEZca4NQ== + dependencies: + "@graphql-tools/graphql-file-loader" "^8.0.0" + "@graphql-tools/json-file-loader" "^8.0.0" + "@graphql-tools/load" "^8.0.0" + "@graphql-tools/merge" "^9.0.0" + "@graphql-tools/url-loader" "^8.0.0" + "@graphql-tools/utils" "^10.0.0" + cosmiconfig "^8.1.0" + jiti "^1.18.2" + minimatch "^4.2.3" + string-env-interpolation "^1.0.1" + tslib "^2.4.0" + +graphql-request@^6.0.0: + version "6.1.0" + resolved "https://registry.yarnpkg.com/graphql-request/-/graphql-request-6.1.0.tgz#f4eb2107967af3c7a5907eb3131c671eac89be4f" + integrity sha512-p+XPfS4q7aIpKVcgmnZKhMNqhltk20hfXtkaIkTfjjmiKMJ5xrt5c743cL03y/K7y1rg3WrIC49xGiEQ4mxdNw== dependencies: - cross-fetch "^3.0.6" - extract-files "^9.0.0" - form-data "^3.0.0" + "@graphql-typed-document-node/core" "^3.2.0" + cross-fetch "^3.1.5" graphql-tag@2.10.3: version "2.10.3" @@ -9051,10 +9083,10 @@ graphql-tag@^2.10.1, graphql-tag@^2.11.0, graphql-tag@^2.12.0: dependencies: tslib "^2.1.0" -graphql-ws@^4.4.1: - version "4.5.1" - resolved "https://registry.yarnpkg.com/graphql-ws/-/graphql-ws-4.5.1.tgz#d9dc6e047c6d4ddb928ccbfb3ca3022580a89925" - integrity sha512-GE7vCMKe2D7fc0ugkM1V8QMneHcbV9c3BpPBzdlW/Uzkqv0F/zZq9DDHxLzg55ZhE5OSLL+n/gyqAMPgH59hcw== +graphql-ws@^5.14.0: + version "5.14.2" + resolved "https://registry.yarnpkg.com/graphql-ws/-/graphql-ws-5.14.2.tgz#7db6f6138717a544d9480f0213f65f2841ed1c52" + integrity sha512-LycmCwhZ+Op2GlHz4BZDsUYHKRiiUz+3r9wbhBATMETNlORQJAaFlAgTFoeRh6xQoQegwYwIylVD1Qns9/DA3w== graphql.macro@^1.4.2: version "1.4.2" @@ -9101,13 +9133,6 @@ harmony-reflect@^1.4.6: resolved "https://registry.yarnpkg.com/harmony-reflect/-/harmony-reflect-1.6.2.tgz#31ecbd32e648a34d030d86adb67d4d47547fe710" integrity sha512-HIp/n38R9kQjDEziXyDTuW3vvoxxyxjxFzXLrBr18uB47GnSt+G9D29fqrpM5ZkspMcPICud3XsBJQ4Y2URg8g== -has-ansi@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/has-ansi/-/has-ansi-2.0.0.tgz#34f5049ce1ecdf2b0649af3ef24e45ed35416d91" - integrity sha1-NPUEnOHs3ysGSa8+8k5F7TVBbZE= - dependencies: - ansi-regex "^2.0.0" - has-bigints@^1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/has-bigints/-/has-bigints-1.0.2.tgz#0871bd3e3d51626f6ca0966668ba35d5602d6eaa" @@ -9395,11 +9420,6 @@ htmlparser2@^6.1.0: domutils "^2.5.2" entities "^2.0.0" -http-cache-semantics@^4.0.0: - version "4.1.1" - resolved "https://registry.yarnpkg.com/http-cache-semantics/-/http-cache-semantics-4.1.1.tgz#abe02fcb2985460bf0323be664436ec3476a6d5a" - integrity sha512-er295DKPVsV82j5kw1Gjt+ADA/XYHsajl82cGNQG2eyoPkvgUhX+nDIyelzhIWbbsXP39EHcI6l5tYs2FYqYXQ== - http-deceiver@^1.2.7: version "1.2.7" resolved "https://registry.yarnpkg.com/http-deceiver/-/http-deceiver-1.2.7.tgz#fa7168944ab9a519d337cb0bec7284dc3e723d87" @@ -9440,6 +9460,14 @@ 
http-proxy-agent@^4.0.1: agent-base "6" debug "4" +http-proxy-agent@^7.0.0: + version "7.0.0" + resolved "https://registry.yarnpkg.com/http-proxy-agent/-/http-proxy-agent-7.0.0.tgz#e9096c5afd071a3fce56e6252bb321583c124673" + integrity sha512-+ZT+iBxVUQ1asugqnD6oWoRiS25AkjNfG085dKJGtGxkdwLQrMKU5wJr2bOOFAXzKcTuqq+7fZlTMgG3SRfIYQ== + dependencies: + agent-base "^7.1.0" + debug "^4.3.4" + http-proxy-middleware@2.0.0: version "2.0.0" resolved "https://registry.npmjs.org/http-proxy-middleware/-/http-proxy-middleware-2.0.0.tgz#20d1ac3409199c83e5d0383ba6436b04e7acb9fe" @@ -9488,6 +9516,14 @@ https-proxy-agent@^5.0.0: agent-base "6" debug "4" +https-proxy-agent@^7.0.0: + version "7.0.2" + resolved "https://registry.yarnpkg.com/https-proxy-agent/-/https-proxy-agent-7.0.2.tgz#e2645b846b90e96c6e6f347fb5b2e41f1590b09b" + integrity sha512-NmLNjm6ucYwtcUmL7JQC1ZQ57LmHP4lT15FQ8D61nak1rO6DH+fz5qNK2Ap5UN4ZapYICE3/0KodcLYSPsPbaA== + dependencies: + agent-base "^7.0.2" + debug "4" + human-signals@^2.1.0: version "2.1.0" resolved "https://registry.npmjs.org/human-signals/-/human-signals-2.1.0.tgz#dc91fcba42e4d06e4abaed33b3e7a3c02f514ea0" @@ -9541,7 +9577,7 @@ ieee754@^1.1.13: resolved "https://registry.yarnpkg.com/ieee754/-/ieee754-1.2.1.tgz#8eb7a10a63fff25d15a57b001586d177d1b0d352" integrity sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA== -ignore@^5.1.4, ignore@^5.2.0: +ignore@^5.2.0: version "5.2.4" resolved "https://registry.yarnpkg.com/ignore/-/ignore-5.2.4.tgz#a291c0c6178ff1b960befe47fcdec301674a6324" integrity sha512-MAb38BcSbH0eHNBxn7ql2NH/kX33OkB3lZ1BNdh7ENeRChHTYsTvWrMubiIAMNS2llXEEgZ1MUOBtXChP3kaFQ== @@ -9561,7 +9597,7 @@ immutable@~3.7.6: resolved "https://registry.yarnpkg.com/immutable/-/immutable-3.7.6.tgz#13b4d3cb12befa15482a26fe1b2ebae640071e4b" integrity sha512-AizQPcaofEtO11RZhPPHBOJRdo/20MKQF9mBLnVkBoyHi1/zXK8fzVdnEpSV9gxqtnh6Qomfp3F0xT5qP/vThw== -import-fresh@^3.0.0, import-fresh@^3.1.0, import-fresh@^3.2.1: +import-fresh@^3.0.0, import-fresh@^3.1.0, import-fresh@^3.2.1, import-fresh@^3.3.0: version "3.3.0" resolved "https://registry.yarnpkg.com/import-fresh/-/import-fresh-3.3.0.tgz#37162c25fcb9ebaa2e6e53d5b4d88ce17d9e0c2b" integrity sha512-veYYhQa+D1QBKznvhUHxb8faxlrwUnxseDAbAp457E0wLNio2bOSKnjYDhMj+YiAq61xrMGhQk9iXVk5FzgQMw== @@ -9569,13 +9605,6 @@ import-fresh@^3.0.0, import-fresh@^3.1.0, import-fresh@^3.2.1: parent-module "^1.0.0" resolve-from "^4.0.0" -import-from@3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/import-from/-/import-from-3.0.0.tgz#055cfec38cd5a27d8057ca51376d7d3bf0891966" - integrity sha512-CiuXOFFSzkU5x/CR0+z7T91Iht4CXgfCxVOFRhh2Zyhg5wOpWvvDLQUsWl+gcN+QscYBjez8hDCt85O7RLDttQ== - dependencies: - resolve-from "^5.0.0" - import-from@4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/import-from/-/import-from-4.0.0.tgz#2710b8d66817d232e16f4166e319248d3d5492e2" @@ -9594,11 +9623,6 @@ imurmurhash@^0.1.4: resolved "https://registry.yarnpkg.com/imurmurhash/-/imurmurhash-0.1.4.tgz#9218b9b2b928a238b13dc4fb6b6d576f231453ea" integrity sha1-khi5srkoojixPcT7a21XbyMUU+o= -indent-string@^3.0.0: - version "3.2.0" - resolved "https://registry.yarnpkg.com/indent-string/-/indent-string-3.2.0.tgz#4a5fd6d27cc332f37e5419a504dbb837105c9289" - integrity sha1-Sl/W0nzDMvN+VBmlBNu4NxBckok= - indent-string@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/indent-string/-/indent-string-4.0.0.tgz#624f8f4497d619b2d9768531d58f4122854d7251" @@ -9627,7 +9651,7 @@ inherits@2.0.3: resolved 
"https://registry.yarnpkg.com/inherits/-/inherits-2.0.3.tgz#633c2c83e3da42a502f52466022480f4208261de" integrity sha1-Yzwsg+PaQqUC9SRmAiSA9CCCYd4= -ini@^1.3.5, ini@~1.3.0: +ini@^1.3.5: version "1.3.8" resolved "https://registry.yarnpkg.com/ini/-/ini-1.3.8.tgz#a29da425b48806f34767a4efce397269af28432c" integrity sha512-JV/yugV2uzW5iMRSiZAyDtQd+nxtUnjeLt0acNdw98kKLrvuRVyB80tsREOE7yvGVgalhZ6RNXCmEHkUKBKxew== @@ -9645,24 +9669,26 @@ inline-style-prefixer@^6.0.0: css-in-js-utils "^3.1.0" fast-loops "^1.1.3" -inquirer@^7.3.3: - version "7.3.3" - resolved "https://registry.yarnpkg.com/inquirer/-/inquirer-7.3.3.tgz#04d176b2af04afc157a83fd7c100e98ee0aad003" - integrity sha512-JG3eIAj5V9CwcGvuOmoo6LB9kbAYT8HXffUl6memuszlwDC/qvFAJw49XJ5NROSFNPxp3iQg1GqkFhaY/CR0IA== +inquirer@^8.0.0: + version "8.2.6" + resolved "https://registry.yarnpkg.com/inquirer/-/inquirer-8.2.6.tgz#733b74888195d8d400a67ac332011b5fae5ea562" + integrity sha512-M1WuAmb7pn9zdFRtQYk26ZBoY043Sse0wVDdk4Bppr+JOXyQYybdtvK+l9wUibhtjdjvtoiNy8tk+EgsYIUqKg== dependencies: ansi-escapes "^4.2.1" - chalk "^4.1.0" + chalk "^4.1.1" cli-cursor "^3.1.0" cli-width "^3.0.0" external-editor "^3.0.3" figures "^3.0.0" - lodash "^4.17.19" + lodash "^4.17.21" mute-stream "0.0.8" + ora "^5.4.1" run-async "^2.4.0" - rxjs "^6.6.0" + rxjs "^7.5.5" string-width "^4.1.0" strip-ansi "^6.0.0" through "^2.3.6" + wrap-ansi "^6.0.1" internal-slot@^1.0.3: version "1.0.3" @@ -9852,18 +9878,6 @@ is-finite@~1.0.1: dependencies: number-is-nan "^1.0.0" -is-fullwidth-code-point@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/is-fullwidth-code-point/-/is-fullwidth-code-point-1.0.0.tgz#ef9e31386f031a7f0d643af82fde50c457ef00cb" - integrity sha1-754xOG8DGn8NZDr4L95QxFfvAMs= - dependencies: - number-is-nan "^1.0.0" - -is-fullwidth-code-point@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz#a3b30a5c4f199183167aaab93beefae3ddfb654f" - integrity sha1-o7MKXE8ZkYMWeqq5O+764937ZU8= - is-fullwidth-code-point@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz#f116f8064fe90b3f7844a38997c0b75051269f1d" @@ -9874,14 +9888,7 @@ is-generator-fn@^2.0.0: resolved "https://registry.yarnpkg.com/is-generator-fn/-/is-generator-fn-2.1.0.tgz#7d140adc389aaf3011a8f2a2a4cfa6faadffb118" integrity sha512-cTIB4yPYL/Grw0EaSzASzg6bBy9gqCofvWN8okThAYIxKJZC+udlRAmGbM0XLeniEJSs8uEgHPGuHSe1XsOLSQ== -is-glob@4.0.1: - version "4.0.1" - resolved "https://registry.yarnpkg.com/is-glob/-/is-glob-4.0.1.tgz#7567dbe9f2f5e2467bc77ab83c4a29482407a5dc" - integrity sha512-5G0tKtBTFImOqDnLB2hG6Bp2qcKEFduo4tZu9MT/H6NQv/ghhy30o55ufafxJ/LdH79LLs2Kfrn85TLKyA7BUg== - dependencies: - is-extglob "^2.1.1" - -is-glob@^4.0.0, is-glob@^4.0.1, is-glob@^4.0.3, is-glob@~4.0.1: +is-glob@4.0.3, is-glob@^4.0.0, is-glob@^4.0.1, is-glob@^4.0.3, is-glob@~4.0.1: version "4.0.3" resolved "https://registry.yarnpkg.com/is-glob/-/is-glob-4.0.3.tgz#64f61e42cbbb2eec2071a9dac0b28ba1e65d5084" integrity sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg== @@ -9900,6 +9907,11 @@ is-integer@~1.0.4: dependencies: is-finite "^1.0.0" +is-interactive@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/is-interactive/-/is-interactive-1.0.0.tgz#cea6e6ae5c870a7b0a0004070b7b587e0252912e" + integrity sha512-2HvIEKRoqS62guEC+qBjpvRubdX910WCMuJTZ+I9yvqKU2/12eSL549HMwtabb4oupdj2sMP50k+XJfB/8JE6w== + is-lower-case@^2.0.2: version "2.0.2" resolved 
"https://registry.yarnpkg.com/is-lower-case/-/is-lower-case-2.0.2.tgz#1c0884d3012c841556243483aa5d522f47396d2a" @@ -9939,13 +9951,6 @@ is-obj@^1.0.1: resolved "https://registry.yarnpkg.com/is-obj/-/is-obj-1.0.1.tgz#3e4729ac1f5fde025cd7d83a896dab9f4f67db0f" integrity sha1-PkcprB9f3gJc19g6iW2rn09n2w8= -is-observable@^1.1.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/is-observable/-/is-observable-1.1.0.tgz#b3e986c8f44de950867cab5403f5a3465005975e" - integrity sha512-NqCa4Sa2d+u7BWc6CukaObG3Fh+CU9bvixbpcXYhy2VvYS7vVGIdAgnIS5Ks3A/cqk4rebLJ9s8zBstT2aKnIA== - dependencies: - symbol-observable "^1.1.0" - is-path-inside@^3.0.3: version "3.0.3" resolved "https://registry.yarnpkg.com/is-path-inside/-/is-path-inside-3.0.3.tgz#d231362e53a07ff2b0e0ea7fed049161ffd16283" @@ -9973,16 +9978,6 @@ is-potential-custom-element-name@^1.0.1: resolved "https://registry.yarnpkg.com/is-potential-custom-element-name/-/is-potential-custom-element-name-1.0.1.tgz#171ed6f19e3ac554394edf78caa05784a45bebb5" integrity sha512-bCYeRA2rVibKZd+s2625gGnGF/t7DSqDs4dP7CrLA1m7jKWz6pps0LpYLJN8Q64HtmPKJ1hrN3nzPNKFEKOUiQ== -is-promise@4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/is-promise/-/is-promise-4.0.0.tgz#42ff9f84206c1991d26debf520dd5c01042dd2f3" - integrity sha512-hvpoI6korhJMnej285dSg6nu1+e6uxs7zG3BYAm5byqDsgJNWwxzM6z6iZiAgQR4TJ30JmBTOwqZUw3WlyH3AQ== - -is-promise@^2.1.0: - version "2.2.2" - resolved "https://registry.yarnpkg.com/is-promise/-/is-promise-2.2.2.tgz#39ab959ccbf9a774cf079f7b40c7a26f763135f1" - integrity sha512-+lP4/6lKUBfQjZ2pdxThZvLUAafmZb8OAxFb8XXtiQmS35INgr85hdOGoEs124ez1FCnZJt6jau/T+alh58QFQ== - is-regex@^1.1.4: version "1.1.4" resolved "https://registry.yarnpkg.com/is-regex/-/is-regex-1.1.4.tgz#eef5663cd59fa4c0ae339505323df6854bb15958" @@ -10015,11 +10010,6 @@ is-shared-array-buffer@^1.0.2: dependencies: call-bind "^1.0.2" -is-stream@^1.1.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/is-stream/-/is-stream-1.1.0.tgz#12d4a3dd4e68e0b79ceb8dbc84173ae80d91ca44" - integrity sha1-EtSj3U5o4Lec6428hBc66A2RykQ= - is-stream@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/is-stream/-/is-stream-2.0.0.tgz#bde9c32680d6fae04129d6ac9d921ce7815f78e3" @@ -10114,18 +10104,10 @@ isobject@^3.0.0, isobject@^3.0.1: resolved "https://registry.yarnpkg.com/isobject/-/isobject-3.0.1.tgz#4e431e92b11a9731636aa1f9c8d1ccbcfdab78df" integrity sha1-TkMekrEalzFjaqH5yNHMvP2reN8= -isomorphic-fetch@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/isomorphic-fetch/-/isomorphic-fetch-3.0.0.tgz#0267b005049046d2421207215d45d6a262b8b8b4" - integrity sha512-qvUtwJ3j6qwsF3jLxkZ72qCgjMysPzDfeV240JHiGZsANBYd+EEuu35v7dfrJ9Up0Ak07D7GGSkGhCHTqg/5wA== - dependencies: - node-fetch "^2.6.1" - whatwg-fetch "^3.4.1" - -isomorphic-ws@4.0.1: - version "4.0.1" - resolved "https://registry.yarnpkg.com/isomorphic-ws/-/isomorphic-ws-4.0.1.tgz#55fd4cd6c5e6491e76dc125938dd863f5cd4f2dc" - integrity sha512-BhBvN2MBpWTaSHdWRb/bwdZJ1WaehQ2L1KngkCkfLUGF0mAWAT1sQUQacEmQ0jXkFw/czDXPNQSL5u2/Krsz1w== +isomorphic-ws@5.0.0, isomorphic-ws@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/isomorphic-ws/-/isomorphic-ws-5.0.0.tgz#e5529148912ecb9b451b46ed44d53dae1ce04bbf" + integrity sha512-muId7Zzn9ywDsyXgTIafTry2sV3nySZeUDe6YedVd1Hvuuep5AsIlqK+XefWpYTyJG5e503F2xIuT2lcU6rCSw== isomorphic.js@^0.2.4: version "0.2.5" @@ -10179,11 +10161,6 @@ istanbul-reports@^3.1.3: html-escaper "^2.0.0" istanbul-lib-report "^3.0.0" -iterall@^1.2.1: - version "1.3.0" - resolved 
"https://registry.yarnpkg.com/iterall/-/iterall-1.3.0.tgz#afcb08492e2915cbd8a0884eb93a8c94d0d72fea" - integrity sha512-QZ9qOMdF+QLHxy1QIpUHUU1D5pS2CG2P69LF6L6CPjPYA/XMOmKV3PZpawHoAjHNyB0swdVTRxdYT4tbBbxqwg== - jake@^10.8.5: version "10.8.7" resolved "https://registry.yarnpkg.com/jake/-/jake-10.8.7.tgz#63a32821177940c33f356e0ba44ff9d34e1c7d8f" @@ -10691,6 +10668,11 @@ jest@^27.4.3: import-local "^3.0.2" jest-cli "^27.5.1" +jiti@^1.17.1: + version "1.21.0" + resolved "https://registry.yarnpkg.com/jiti/-/jiti-1.21.0.tgz#7c97f8fe045724e136a397f7340475244156105d" + integrity sha512-gFqAIbuKyyso/3G2qhiO2OM6shY6EPP/R0+mkDbyspxKazh8BXDC5FiFsUjlczgdNz/vfra0da2y+aHrusLG/Q== + jiti@^1.18.2: version "1.18.2" resolved "https://registry.yarnpkg.com/jiti/-/jiti-1.18.2.tgz#80c3ef3d486ebf2450d9335122b32d121f2a83cd" @@ -10707,6 +10689,11 @@ joi@^17.11.0: "@sideway/formula" "^3.0.1" "@sideway/pinpoint" "^2.0.0" +jose@^5.0.0: + version "5.1.3" + resolved "https://registry.yarnpkg.com/jose/-/jose-5.1.3.tgz#303959d85c51b5cb14725f930270b72be56abdca" + integrity sha512-GPExOkcMsCLBTi1YetY2LmkoY559fss0+0KVa6kOfb2YFe84nAM7Nm/XzuZozah4iHgmBGrCOHL5/cy670SBRw== + js-cookie@^2.2.1: version "2.2.1" resolved "https://registry.yarnpkg.com/js-cookie/-/js-cookie-2.2.1.tgz#69e106dc5d5806894562902aa5baec3744e9b2b8" @@ -10780,11 +10767,6 @@ jsesc@~0.5.0: resolved "https://registry.yarnpkg.com/jsesc/-/jsesc-0.5.0.tgz#e7dee66e35d6fc16f710fe91d5cf69f70f08911d" integrity sha1-597mbjXW/Bb3EP6R1c9p9w8IkR0= -json-buffer@3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/json-buffer/-/json-buffer-3.0.0.tgz#5b1f397afc75d677bde8bcfc0e47e1f9a3d9a898" - integrity sha1-Wx85evx11ne96Lz8Dkfh+aPZqJg= - json-parse-even-better-errors@^2.3.0, json-parse-even-better-errors@^2.3.1: version "2.3.1" resolved "https://registry.yarnpkg.com/json-parse-even-better-errors/-/json-parse-even-better-errors-2.3.1.tgz#7c47805a94319928e05777405dc12e1f7a4ee02d" @@ -10844,7 +10826,7 @@ json5@^1.0.2: dependencies: minimist "^1.2.0" -json5@^2.1.2, json5@^2.2.0, json5@^2.2.2: +json5@^2.1.2, json5@^2.2.0, json5@^2.2.2, json5@^2.2.3: version "2.2.3" resolved "https://registry.yarnpkg.com/json5/-/json5-2.2.3.tgz#78cd6f1a19bdc12b73db5ad0c61efd66c1e29283" integrity sha512-XmOWe7eyHYH14cLdVPoyg+GOH3rYX++KpzrylJwSW98t3Nk+U8XOl8FWKOgwtzdb8lXGf6zYwDUzeHMWfxasyg== @@ -10875,22 +10857,6 @@ jsonpointer@^5.0.0: resolved "https://registry.yarnpkg.com/jsonpointer/-/jsonpointer-5.0.1.tgz#2110e0af0900fd37467b5907ecd13a7884a1b559" integrity sha512-p/nXbhSEcu3pZRdkW1OfJhpsVtW1gd4Wa1fnQc9YLiTfAjn0312eMKimbdIQzuZl9aa9xUGaRlP9T/CJE/ditQ== -jsonwebtoken@^8.5.1: - version "8.5.1" - resolved "https://registry.yarnpkg.com/jsonwebtoken/-/jsonwebtoken-8.5.1.tgz#00e71e0b8df54c2121a1f26137df2280673bcc0d" - integrity sha512-XjwVfRS6jTMsqYs0EsuJ4LGxXV14zQybNd4L2r0UvbVnSF9Af8x7p5MzbJ90Ioz/9TI41/hTCvznF/loiSzn8w== - dependencies: - jws "^3.2.2" - lodash.includes "^4.3.0" - lodash.isboolean "^3.0.3" - lodash.isinteger "^4.0.4" - lodash.isnumber "^3.0.3" - lodash.isplainobject "^4.0.6" - lodash.isstring "^4.0.1" - lodash.once "^4.0.0" - ms "^2.1.1" - semver "^5.6.0" - jsprim@^1.2.2: version "1.4.1" resolved "https://registry.yarnpkg.com/jsprim/-/jsprim-1.4.1.tgz#313e66bc1e5cc06e438bc1b7499c2e5c56acb6a2" @@ -10921,30 +10887,6 @@ just-extend@^4.0.2: resolved "https://registry.yarnpkg.com/just-extend/-/just-extend-4.2.1.tgz#ef5e589afb61e5d66b24eca749409a8939a8c744" integrity sha512-g3UB796vUFIY90VIv/WX3L2c8CS2MdWUww3CNrYmqza1Fg0DURc2K/O4YrnklBdQarSJ/y8JnJYDGc+1iumQjg== -jwa@^1.4.1: - 
version "1.4.1" - resolved "https://registry.yarnpkg.com/jwa/-/jwa-1.4.1.tgz#743c32985cb9e98655530d53641b66c8645b039a" - integrity sha512-qiLX/xhEEFKUAJ6FiBMbes3w9ATzyk5W7Hvzpa/SLYdxNtng+gcurvrI7TbACjIXlsJyr05/S1oUhZrc63evQA== - dependencies: - buffer-equal-constant-time "1.0.1" - ecdsa-sig-formatter "1.0.11" - safe-buffer "^5.0.1" - -jws@^3.2.2: - version "3.2.2" - resolved "https://registry.yarnpkg.com/jws/-/jws-3.2.2.tgz#001099f3639468c9414000e99995fa52fb478304" - integrity sha512-YHlZCB6lMTllWDtSPHz/ZXTsi8S00usEV6v1tjq8tOUZzw7DpSDWVXjXDre6ed1w/pd495ODpHZYSdkRTsa0HA== - dependencies: - jwa "^1.4.1" - safe-buffer "^5.0.1" - -keyv@^3.0.0: - version "3.1.0" - resolved "https://registry.yarnpkg.com/keyv/-/keyv-3.1.0.tgz#ecc228486f69991e49e9476485a5be1e8fc5c4d9" - integrity sha512-9ykJ/46SN/9KPM/sichzQ7OvXyGDYKGTaDlKMGCAlg2UK8KRy4jb0d8sFc+0Tt0YYnThq8X2RZgCg74RPxgcVA== - dependencies: - json-buffer "3.0.0" - kind-of@^3.0.2, kind-of@^3.0.3, kind-of@^3.2.0: version "3.2.2" resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-3.2.2.tgz#31ea21a734bab9bbb0f32466d893aea51e4a3c64" @@ -10991,13 +10933,6 @@ language-tags@=1.0.5: dependencies: language-subtag-registry "~0.3.2" -latest-version@5.1.0: - version "5.1.0" - resolved "https://registry.yarnpkg.com/latest-version/-/latest-version-5.1.0.tgz#119dfe908fe38d15dfa43ecd13fa12ec8832face" - integrity sha512-weT+r0kTkRQdCdYCNtkMwWXQTMEswKrFBkm4ckQOMVhhqhIMI1UT2hMj+1iigIhgSZm5gTmrRXBNoGUgaTY1xA== - dependencies: - package-json "^6.3.0" - launch-editor@^2.6.0: version "2.6.0" resolved "https://registry.yarnpkg.com/launch-editor/-/launch-editor-2.6.0.tgz#4c0c1a6ac126c572bd9ff9a30da1d2cae66defd7" @@ -11082,49 +11017,19 @@ lines-and-columns@^1.1.6: resolved "https://registry.yarnpkg.com/lines-and-columns/-/lines-and-columns-1.1.6.tgz#1c00c743b433cd0a4e80758f7b64a57440d9ff00" integrity sha1-HADHQ7QzzQpOgHWPe2SldEDZ/wA= -listr-silent-renderer@^1.1.1: - version "1.1.1" - resolved "https://registry.yarnpkg.com/listr-silent-renderer/-/listr-silent-renderer-1.1.1.tgz#924b5a3757153770bf1a8e3fbf74b8bbf3f9242e" - integrity sha1-kktaN1cVN3C/Go4/v3S4u/P5JC4= - -listr-update-renderer@^0.5.0: - version "0.5.0" - resolved "https://registry.yarnpkg.com/listr-update-renderer/-/listr-update-renderer-0.5.0.tgz#4ea8368548a7b8aecb7e06d8c95cb45ae2ede6a2" - integrity sha512-tKRsZpKz8GSGqoI/+caPmfrypiaq+OQCbd+CovEC24uk1h952lVj5sC7SqyFUm+OaJ5HN/a1YLt5cit2FMNsFA== - dependencies: - chalk "^1.1.3" - cli-truncate "^0.2.1" - elegant-spinner "^1.0.1" - figures "^1.7.0" - indent-string "^3.0.0" - log-symbols "^1.0.2" - log-update "^2.3.0" - strip-ansi "^3.0.1" - -listr-verbose-renderer@^0.5.0: - version "0.5.0" - resolved "https://registry.yarnpkg.com/listr-verbose-renderer/-/listr-verbose-renderer-0.5.0.tgz#f1132167535ea4c1261102b9f28dac7cba1e03db" - integrity sha512-04PDPqSlsqIOaaaGZ+41vq5FejI9auqTInicFRndCBgE3bXG8D6W1I+mWhk+1nqbHmyhla/6BUrd5OSiHwKRXw== - dependencies: - chalk "^2.4.1" - cli-cursor "^2.1.0" - date-fns "^1.27.2" - figures "^2.0.0" - -listr@^0.14.3: - version "0.14.3" - resolved "https://registry.yarnpkg.com/listr/-/listr-0.14.3.tgz#2fea909604e434be464c50bddba0d496928fa586" - integrity sha512-RmAl7su35BFd/xoMamRjpIE4j3v+L28o8CT5YhAXQJm1fD+1l9ngXY8JAQRJ+tFK2i5njvi0iRUKV09vPwA0iA== - dependencies: - "@samverschueren/stream-to-observable" "^0.3.0" - is-observable "^1.1.0" - is-promise "^2.1.0" - is-stream "^1.1.0" - listr-silent-renderer "^1.1.1" - listr-update-renderer "^0.5.0" - listr-verbose-renderer "^0.5.0" - p-map "^2.0.0" - rxjs "^6.3.3" +listr2@^4.0.5: + 
version "4.0.5" + resolved "https://registry.yarnpkg.com/listr2/-/listr2-4.0.5.tgz#9dcc50221583e8b4c71c43f9c7dfd0ef546b75d5" + integrity sha512-juGHV1doQdpNT3GSTs9IUN43QJb7KHdF9uqg7Vufs/tG9VTzpFphqF4pm/ICdAABGQxsyNn9CiYA3StkI6jpwA== + dependencies: + cli-truncate "^2.1.0" + colorette "^2.0.16" + log-update "^4.0.0" + p-map "^4.0.0" + rfdc "^1.3.0" + rxjs "^7.5.5" + through "^2.3.8" + wrap-ansi "^7.0.0" loader-runner@^4.2.0: version "4.3.0" @@ -11212,7 +11117,7 @@ lodash.forin@^4.4.0: resolved "https://registry.npmjs.org/lodash.forin/-/lodash.forin-4.4.0.tgz#5d3f20ae564011fbe88381f7d98949c9c9519731" integrity sha1-XT8grlZAEfvog4H32YlJyclRlzE= -lodash.get@^4, lodash.get@^4.4.2: +lodash.get@^4.4.2: version "4.4.2" resolved "https://registry.yarnpkg.com/lodash.get/-/lodash.get-4.4.2.tgz#2d177f652fa31e939b4438d5341499dfa3825e99" integrity sha1-LRd/ZS+jHpObRDjVNBSZ36OCXpk= @@ -11222,21 +11127,11 @@ lodash.has@^4.5.2: resolved "https://registry.npmjs.org/lodash.has/-/lodash.has-4.5.2.tgz#d19f4dc1095058cccbe2b0cdf4ee0fe4aa37c862" integrity sha1-0Z9NwQlQWMzL4rDN9O4P5Ko3yGI= -lodash.includes@^4.3.0: - version "4.3.0" - resolved "https://registry.yarnpkg.com/lodash.includes/-/lodash.includes-4.3.0.tgz#60bb98a87cb923c68ca1e51325483314849f553f" - integrity sha1-YLuYqHy5I8aMoeUTJUgzFISfVT8= - lodash.invokemap@^4.6.0: version "4.6.0" resolved "https://registry.npmjs.org/lodash.invokemap/-/lodash.invokemap-4.6.0.tgz#1748cda5d8b0ef8369c4eb3ec54c21feba1f2d62" integrity sha1-F0jNpdiw74NpxOs+xUwh/rofLWI= -lodash.isboolean@^3.0.3: - version "3.0.3" - resolved "https://registry.yarnpkg.com/lodash.isboolean/-/lodash.isboolean-3.0.3.tgz#6c2e171db2a257cd96802fd43b01b20d5f5870f6" - integrity sha1-bC4XHbKiV82WgC/UOwGyDV9YcPY= - lodash.isempty@^4.4.0: version "4.4.0" resolved "https://registry.npmjs.org/lodash.isempty/-/lodash.isempty-4.4.0.tgz#6f86cbedd8be4ec987be9aaf33c9684db1b31e7e" @@ -11257,21 +11152,11 @@ lodash.isinteger@^4.0.4: resolved "https://registry.yarnpkg.com/lodash.isinteger/-/lodash.isinteger-4.0.4.tgz#619c0af3d03f8b04c31f5882840b77b11cd68343" integrity sha1-YZwK89A/iwTDH1iChAt3sRzWg0M= -lodash.isnumber@^3.0.3: - version "3.0.3" - resolved "https://registry.yarnpkg.com/lodash.isnumber/-/lodash.isnumber-3.0.3.tgz#3ce76810c5928d03352301ac287317f11c0b1ffc" - integrity sha1-POdoEMWSjQM1IwGsKHMX8RwLH/w= - lodash.isplainobject@^4.0.6: version "4.0.6" resolved "https://registry.yarnpkg.com/lodash.isplainobject/-/lodash.isplainobject-4.0.6.tgz#7c526a52d89b45c45cc690b88163be0497f550cb" integrity sha1-fFJqUtibRcRcxpC4gWO+BJf1UMs= -lodash.isstring@^4.0.1: - version "4.0.1" - resolved "https://registry.yarnpkg.com/lodash.isstring/-/lodash.isstring-4.0.1.tgz#d527dfb5456eca7cc9bb95d5daeaf88ba54a5451" - integrity sha1-1SfftUVuynzJu5XV2ur4i6VKVFE= - lodash.lowerfirst@^4.3.1: version "4.3.1" resolved "https://registry.npmjs.org/lodash.lowerfirst/-/lodash.lowerfirst-4.3.1.tgz#de3c7b12e02c6524a0059c2f6cb7c5c52655a13d" @@ -11297,11 +11182,6 @@ lodash.merge@^4.6.2: resolved "https://registry.yarnpkg.com/lodash.merge/-/lodash.merge-4.6.2.tgz#558aa53b43b661e1925a0afdfa36a9a1085fe57a" integrity sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ== -lodash.once@^4.0.0: - version "4.1.1" - resolved "https://registry.npmjs.org/lodash.once/-/lodash.once-4.1.1.tgz#0dd3971213c7c56df880977d504c88fb471a97ac" - integrity sha1-DdOXEhPHxW34gJd9UEyI+0cal6w= - lodash.pick@4.4.0, lodash.pick@^4.4.0: version "4.4.0" resolved 
"https://registry.npmjs.org/lodash.pick/-/lodash.pick-4.4.0.tgz#52f05610fff9ded422611441ed1fc123a03001b3" @@ -11332,19 +11212,12 @@ lodash.values@^4.3.0: resolved "https://registry.npmjs.org/lodash.values/-/lodash.values-4.3.0.tgz#a3a6c2b0ebecc5c2cba1c17e6e620fe81b53d347" integrity sha1-o6bCsOvsxcLLocF+bmIP6BtT00c= -lodash@4.17.21, lodash@^4.0.1, lodash@^4.17.11, lodash@^4.17.15, lodash@^4.17.19, lodash@^4.17.20, lodash@^4.17.21, lodash@^4.7.0, lodash@~4.17.0: +lodash@^4.0.1, lodash@^4.17.11, lodash@^4.17.15, lodash@^4.17.20, lodash@^4.17.21, lodash@^4.7.0, lodash@~4.17.0: version "4.17.21" resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.21.tgz#679591c564c3bffaae8454cf0b3df370c3d6911c" integrity sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg== -log-symbols@^1.0.2: - version "1.0.2" - resolved "https://registry.yarnpkg.com/log-symbols/-/log-symbols-1.0.2.tgz#376ff7b58ea3086a0f09facc74617eca501e1a18" - integrity sha1-N2/3tY6jCGoPCfrMdGF+ylAeGhg= - dependencies: - chalk "^1.0.0" - -log-symbols@^4.0.0: +log-symbols@^4.0.0, log-symbols@^4.1.0: version "4.1.0" resolved "https://registry.yarnpkg.com/log-symbols/-/log-symbols-4.1.0.tgz#3fbdbb95b4683ac9fc785111e792e558d4abd503" integrity sha512-8XPvpAA8uyhfteu8pIvQxpJZ7SYYdpUivZpGy6sFsBuKRY/7rQGavedeB8aK+Zkyq6upMFVL/9AW6vOYzfRyLg== @@ -11352,14 +11225,15 @@ log-symbols@^4.0.0: chalk "^4.1.0" is-unicode-supported "^0.1.0" -log-update@^2.3.0: - version "2.3.0" - resolved "https://registry.yarnpkg.com/log-update/-/log-update-2.3.0.tgz#88328fd7d1ce7938b29283746f0b1bc126b24708" - integrity sha1-iDKP19HOeTiykoN0bwsbwSayRwg= +log-update@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/log-update/-/log-update-4.0.0.tgz#589ecd352471f2a1c0c570287543a64dfd20e0a1" + integrity sha512-9fkkDevMefjg0mmzWFBW8YkFP91OrizzkW3diF7CpG+S2EYdy4+TVfGwz1zeF8x7hCx1ovSPTOE9Ngib74qqUg== dependencies: - ansi-escapes "^3.0.0" - cli-cursor "^2.0.0" - wrap-ansi "^3.0.1" + ansi-escapes "^4.3.0" + cli-cursor "^3.1.0" + slice-ansi "^4.0.0" + wrap-ansi "^6.2.0" longest-streak@^2.0.0: version "2.0.4" @@ -11380,23 +11254,13 @@ lower-case-first@^2.0.2: dependencies: tslib "^2.0.3" -lower-case@^2.0.1, lower-case@^2.0.2: +lower-case@^2.0.2: version "2.0.2" resolved "https://registry.yarnpkg.com/lower-case/-/lower-case-2.0.2.tgz#6fa237c63dbdc4a82ca0fd882e4722dc5e634e28" integrity sha512-7fm3l3NAF9WfN6W3JOmf5drwpVqX78JtoGJ3A6W0a6ZnldM41w2fV5D490psKFTpMds8TJse/eHLFFsNHHjHgg== dependencies: tslib "^2.0.3" -lowercase-keys@^1.0.0, lowercase-keys@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/lowercase-keys/-/lowercase-keys-1.0.1.tgz#6f9e30b47084d971a7c820ff15a6c5167b74c26f" - integrity sha512-G2Lj61tXDnVFFOi8VZds+SoQjtQC3dgokKdDG2mTm1tx4m50NUHBOZSBwQQHyy0V12A0JTG4icfZQH+xPyh8VA== - -lowercase-keys@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/lowercase-keys/-/lowercase-keys-2.0.0.tgz#2603e78b7b4b0006cbca2fbcc8a3202558ac9479" - integrity sha512-tqNXrS78oMOE73NMxK4EMLQsQowWf8jKooH9g7xPavRT706R6bkQJ6DY2Te7QukaZsulxa30wQ7bk0pm4XiHmA== - lowlight@^1.17.0: version "1.20.0" resolved "https://registry.yarnpkg.com/lowlight/-/lowlight-1.20.0.tgz#ddb197d33462ad0d93bf19d17b6c301aa3941888" @@ -11446,7 +11310,7 @@ make-dir@^3.0.0, make-dir@^3.0.2, make-dir@^3.1.0: dependencies: semver "^6.0.0" -make-error@^1, make-error@^1.1.1, make-error@^1.3.6: +make-error@^1.1.1, make-error@^1.3.6: version "1.3.6" resolved 
"https://registry.yarnpkg.com/make-error/-/make-error-1.3.6.tgz#2eb2e37ea9b67c4891f684a1394799af484cf7a2" integrity sha512-s8UhlNe7vPKomQhC1qFelMokr/Sc3AgNbso3n74mVPA5LTZwkB9NlXf4XPamLxJE8h0gh73rM94xvwRT2CVInw== @@ -11652,10 +11516,10 @@ merge2@^1.3.0, merge2@^1.4.1: resolved "https://registry.yarnpkg.com/merge2/-/merge2-1.4.1.tgz#4368892f885e907455a6fd7dc55c0c9d404990ae" integrity sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg== -meros@1.1.4: - version "1.1.4" - resolved "https://registry.yarnpkg.com/meros/-/meros-1.1.4.tgz#c17994d3133db8b23807f62bec7f0cb276cfd948" - integrity sha512-E9ZXfK9iQfG9s73ars9qvvvbSIkJZF5yOo9j4tcwM5tN8mUKfj/EKN5PzOr3ZH0y5wL7dLAHw3RVEfpQV9Q7VQ== +meros@^1.2.1: + version "1.3.0" + resolved "https://registry.yarnpkg.com/meros/-/meros-1.3.0.tgz#c617d2092739d55286bf618129280f362e6242f2" + integrity sha512-2BNGOimxEz5hmjUG2FwoxCt5HN7BXdaWyFqEwxPTrJzVdABtrL4TiHTcsWSFAxPQ/tOnEaQEJh3qWq71QRMY+w== messageformat-parser@^4.1.3: version "4.1.3" @@ -11764,21 +11628,11 @@ mime@1.6.0, mime@^1.4.1: resolved "https://registry.yarnpkg.com/mime/-/mime-1.6.0.tgz#32cd9e5c64553bd58d19a568af452acff04981b1" integrity sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg== -mimic-fn@^1.0.0: - version "1.2.0" - resolved "https://registry.yarnpkg.com/mimic-fn/-/mimic-fn-1.2.0.tgz#820c86a39334640e99516928bd03fca88057d022" - integrity sha512-jf84uxzwiuiIVKiOLpfYk7N46TSy8ubTonmneY9vrpHNAnp0QBt2BxWV9dO3/j+BoVAb+a5G6YDPW3M5HOdMWQ== - mimic-fn@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/mimic-fn/-/mimic-fn-2.1.0.tgz#7ed2c2ccccaf84d3ffcb7a69b57711fc2083401b" integrity sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg== -mimic-response@^1.0.0, mimic-response@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/mimic-response/-/mimic-response-1.0.1.tgz#4923538878eef42063cb8a3e3b0798781487ab1b" - integrity sha512-j5EctnkH7amfV/q5Hgmoal1g2QHFJRraOtmx0JpIqkxhBhI/lJSl1nMpQ45hVarwNETOoWEimndZ4QK0RHxuxQ== - min-document@^2.19.0: version "2.19.0" resolved "https://registry.yarnpkg.com/min-document/-/min-document-2.19.0.tgz#7bd282e3f5842ed295bb748cdd9f1ffa2c824685" @@ -11811,7 +11665,7 @@ minimalistic-assert@^1.0.0: resolved "https://registry.yarnpkg.com/minimalistic-assert/-/minimalistic-assert-1.0.1.tgz#2e194de044626d4a10e7f7fbc00ce73e83e4d5c7" integrity sha512-UtJcAD4yEaGtjPezWuO9wC4nwUnVH/8/Im3yEHQP4b67cXlD/Qr9hdITCU1xDbSEXg2XKNaP8jsReV7vQd00/A== -minimatch@3.0.4, minimatch@3.0.5, minimatch@^3.0.4, minimatch@^3.0.5, minimatch@^3.1.2, minimatch@^5.0.1: +minimatch@3.0.5, minimatch@^3.0.4, minimatch@^3.0.5, minimatch@^3.1.2, minimatch@^4.2.3, minimatch@^5.0.1: version "3.0.5" resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-3.0.5.tgz#4da8f1290ee0f0f8e83d60ca69f8f134068604a3" integrity sha512-tUpxzX0VAzJHjLu0xUfFv1gwVp9ba3IOuRAVH2EGuRW8a5emA2FlACLqiT/lDVtS1W+TGNwqz3sWaNyLgDJWuw== @@ -11875,11 +11729,6 @@ mkdirp@^0.5.1, mkdirp@~0.5.1: dependencies: minimist "^1.2.5" -mkdirp@^1.0.4: - version "1.0.4" - resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-1.0.4.tgz#3eb5ed62622756d79a5f0e2a221dfebad75c2f7e" - integrity sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw== - mocked-env@1.3.2: version "1.3.2" resolved "https://registry.npmjs.org/mocked-env/-/mocked-env-1.3.2.tgz#548eb2fde141d083de70dc6b231cd9f3210d8731" @@ -12049,11 +11898,6 @@ no-case@^3.0.4: lower-case "^2.0.2" tslib "^2.0.3" 
-node-fetch@2.6.1: - version "2.6.1" - resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-2.6.1.tgz#045bd323631f76ed2e2b55573394416b639a0052" - integrity sha512-V4aYg89jEoVRxRb2fJdAg8FHvI7cEyYdVAh94HH0UIK8oJxUfkjlDQN9RbMx+bEjP7+ggMiFRprSti032Oipxw== - node-fetch@2.6.7, node-fetch@^2.6.1: version "2.6.7" resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-2.6.7.tgz#24de9fba827e3b4ae44dc8b20256a379160052ad" @@ -12076,6 +11920,11 @@ node-releases@^2.0.12: resolved "https://registry.yarnpkg.com/node-releases/-/node-releases-2.0.12.tgz#35627cc224a23bfb06fb3380f2b3afaaa7eb1039" integrity sha512-QzsYKWhXTWx8h1kIvqfnC++o0pEmpRQA/aenALsL2F4pqNVr7YzcdMlDij5WBnwftRbJCNJL/O7zdKaxKPHqgQ== +node-releases@^2.0.14: + version "2.0.14" + resolved "https://registry.yarnpkg.com/node-releases/-/node-releases-2.0.14.tgz#2ffb053bceb8b2be8495ece1ab6ce600c4461b0b" + integrity sha512-y10wOWt8yZpqXmOgRo77WaHEmhYQYGNA6y421PKsKYWEK8aW+cqAphborZDhqfyKrbZEN92CN1X2KbafY2s7Yw== + normalize-path@^2.1.1: version "2.1.1" resolved "https://registry.yarnpkg.com/normalize-path/-/normalize-path-2.1.1.tgz#1ab28b556e198363a8c1a6f7e6fa20137fe6aed9" @@ -12093,11 +11942,6 @@ normalize-range@^0.1.2: resolved "https://registry.yarnpkg.com/normalize-range/-/normalize-range-0.1.2.tgz#2d10c06bdfd312ea9777695a4d28439456b75942" integrity sha1-LRDAa9/TEuqXd2laTShDlFa3WUI= -normalize-url@^4.1.0: - version "4.5.1" - resolved "https://registry.yarnpkg.com/normalize-url/-/normalize-url-4.5.1.tgz#0dd90cf1288ee1d1313b87081c9a5932ee48518a" - integrity sha512-9UZCFRHQdNrfTpGg8+1INIg93B6zE0aXMVFkw1WFwvO4SlZywU6aLg5Of0Ap/PgcbSw4LNxvMWXMeugwMCX0AA== - normalize-url@^6.0.1: version "6.1.0" resolved "https://registry.yarnpkg.com/normalize-url/-/normalize-url-6.1.0.tgz#40d0885b535deffe3f3147bec877d05fe4c5668a" @@ -12258,20 +12102,13 @@ on-headers@~1.0.2: resolved "https://registry.yarnpkg.com/on-headers/-/on-headers-1.0.2.tgz#772b0ae6aaa525c399e489adfad90c403eb3c28f" integrity sha512-pZAE+FJLoyITytdqK0U5s+FIpjN0JP3OzFi/u8Rx+EV5/W+JTWGXG8xFzevE7AjBfDqHv/8vL8qQsIhHnqRkrA== -once@^1.3.0, once@^1.3.1, once@^1.4.0: +once@^1.3.0: version "1.4.0" resolved "https://registry.yarnpkg.com/once/-/once-1.4.0.tgz#583b1aa775961d4b113ac17d9c50baef9dd76bd1" integrity sha1-WDsap3WWHUsROsF9nFC6753Xa9E= dependencies: wrappy "1" -onetime@^2.0.0: - version "2.0.1" - resolved "https://registry.yarnpkg.com/onetime/-/onetime-2.0.1.tgz#067428230fd67443b2794b22bba528b6867962d4" - integrity sha1-BnQoIw/WdEOyeUsiu6UotoZ5YtQ= - dependencies: - mimic-fn "^1.0.0" - onetime@^5.1.0, onetime@^5.1.2: version "5.1.2" resolved "https://registry.yarnpkg.com/onetime/-/onetime-5.1.2.tgz#d0e96ebb56b07476df1dd9c4806e5237985ca45e" @@ -12328,6 +12165,21 @@ optionator@^0.9.1: type-check "^0.4.0" word-wrap "^1.2.3" +ora@^5.4.1: + version "5.4.1" + resolved "https://registry.yarnpkg.com/ora/-/ora-5.4.1.tgz#1b2678426af4ac4a509008e5e4ac9e9959db9e18" + integrity sha512-5b6Y85tPxZZ7QytO+BQzysW31HJku27cRIlkbAXaNx+BdcVi+LlRFmVXzeF6a7JCwJpyw5c4b+YSVImQIrBpuQ== + dependencies: + bl "^4.1.0" + chalk "^4.1.0" + cli-cursor "^3.1.0" + cli-spinners "^2.5.0" + is-interactive "^1.0.0" + is-unicode-supported "^0.1.0" + log-symbols "^4.1.0" + strip-ansi "^6.0.0" + wcwidth "^1.0.1" + orderedmap@^2.0.0: version "2.1.0" resolved "https://registry.yarnpkg.com/orderedmap/-/orderedmap-2.1.0.tgz#819457082fa3a06abd316d83a281a1ca467437cd" @@ -12338,11 +12190,6 @@ os-tmpdir@~1.0.2: resolved "https://registry.yarnpkg.com/os-tmpdir/-/os-tmpdir-1.0.2.tgz#bbe67406c79aa85c5cfec766fe5734555dfa1274" integrity 
sha1-u+Z0BseaqFxc/sdm/lc0VV36EnQ= -p-cancelable@^1.0.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/p-cancelable/-/p-cancelable-1.1.0.tgz#d078d15a3af409220c886f1d9a0ca2e441ab26cc" - integrity sha512-s73XxOZ4zpt1edZYZzvhqFa6uvQc1vwUa0K0BdtIZgQMAJj9IbebH+JkgKZc9h+B05PKHLOTl4ajG1BmNrVZlw== - p-limit@3.1.0, p-limit@^3.0.2: version "3.1.0" resolved "https://registry.yarnpkg.com/p-limit/-/p-limit-3.1.0.tgz#e1daccbe78d0d1388ca18c64fea38e3e57e3706b" @@ -12378,10 +12225,12 @@ p-locate@^5.0.0: dependencies: p-limit "^3.0.2" -p-map@^2.0.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/p-map/-/p-map-2.1.0.tgz#310928feef9c9ecc65b68b17693018a665cea175" - integrity sha512-y3b8Kpd8OAN444hxfBbFfj1FY/RjtTd8tzYwhUqNYXx0fXx2iX4maP4Qr6qhIKbQXI02wTLAda4fYUbDagTUFw== +p-map@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/p-map/-/p-map-4.0.0.tgz#bb2f95a5eda2ec168ec9274e06a747c3e2904d2b" + integrity sha512-/bjOqmgETBYB5BoEeGVea8dmvHb2m9GLy1E9W43yeyfP6QQCZGFNa+XRceJEuDB6zqr+gKpIAmlLebMpykw/MQ== + dependencies: + aggregate-error "^3.0.0" p-retry@^4.5.0: version "4.6.2" @@ -12396,16 +12245,6 @@ p-try@^2.0.0: resolved "https://registry.yarnpkg.com/p-try/-/p-try-2.2.0.tgz#cb2868540e313d61de58fafbe35ce9004d5540e6" integrity sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ== -package-json@^6.3.0: - version "6.5.0" - resolved "https://registry.yarnpkg.com/package-json/-/package-json-6.5.0.tgz#6feedaca35e75725876d0b0e64974697fed145b0" - integrity sha512-k3bdm2n25tkyxcjSKzB5x8kfVxlMdgsbPr0GkZcwHsLpba6cBjqCt1KlcChKEvxHIcTB1FVMuwoijZ26xex5MQ== - dependencies: - got "^9.6.0" - registry-auth-token "^4.0.0" - registry-url "^5.0.0" - semver "^6.2.0" - param-case@^3.0.4: version "3.0.4" resolved "https://registry.yarnpkg.com/param-case/-/param-case-3.0.4.tgz#7d17fe4aa12bde34d4a77d91acfb6219caad01c5" @@ -13183,11 +13022,6 @@ prelude-ls@~1.1.2: resolved "https://registry.yarnpkg.com/prelude-ls/-/prelude-ls-1.1.2.tgz#21932a549f5e52ffd9a827f570e04be62a97da54" integrity sha1-IZMqVJ9eUv/ZqCf1cOBL5iqX2lQ= -prepend-http@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/prepend-http/-/prepend-http-2.0.0.tgz#e92434bfa5ea8c19f41cdfd401d741a3c819d897" - integrity sha1-6SQ0v6XqjBn0HN/UAddBo8gZ2Jc= - pretender@^3.4.3: version "3.4.3" resolved "https://registry.npmjs.org/pretender/-/pretender-3.4.3.tgz#a3b4160516007075d29127262f3a0063d19896e9" @@ -13493,19 +13327,28 @@ psl@^1.1.28, psl@^1.1.33: resolved "https://registry.yarnpkg.com/psl/-/psl-1.8.0.tgz#9326f8bcfb013adcc005fdff056acce020e51c24" integrity sha512-RIdOzyoavK+hA18OGGWDqUTsCLhtA7IcZ/6NCs4fFJaHBDab+pDDmDIByWFRQJq2Cd7r1OoQxBGKOaztq+hjIQ== -pump@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/pump/-/pump-3.0.0.tgz#b4a2116815bde2f4e1ea602354e8c75565107a64" - integrity sha512-LwZy+p3SFs1Pytd/jYct4wpv49HiYCqd9Rlc5ZVdk0V+8Yzv6jR5Blk3TRmPL1ft69TxP0IMZGJ+WPFU2BFhww== - dependencies: - end-of-stream "^1.1.0" - once "^1.3.1" +punycode@^1.3.2: + version "1.4.1" + resolved "https://registry.yarnpkg.com/punycode/-/punycode-1.4.1.tgz#c0d5a63b2718800ad8e1eb0fa5269c84dd41845e" + integrity sha512-jmYNElW7yvO7TV33CjSmvSiE2yco3bV2czu/OzDKdMNVZQWfxCblURLhf+47syQRBntjfLdd/H0egrzIG+oaFQ== punycode@^2.1.0, punycode@^2.1.1: version "2.1.1" resolved "https://registry.yarnpkg.com/punycode/-/punycode-2.1.1.tgz#b58b010ac40c22c5657616c8d2c2c02c7bf479ec" integrity sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A== +pvtsutils@^1.3.2, 
pvtsutils@^1.3.5: + version "1.3.5" + resolved "https://registry.yarnpkg.com/pvtsutils/-/pvtsutils-1.3.5.tgz#b8705b437b7b134cd7fd858f025a23456f1ce910" + integrity sha512-ARvb14YB9Nm2Xi6nBq1ZX6dAM0FsJnuk+31aUp4TrcZEdKUlSqOqsxJHUPJDNE3qiIp+iUPEIeR6Je/tgV7zsA== + dependencies: + tslib "^2.6.1" + +pvutils@^1.1.3: + version "1.1.3" + resolved "https://registry.yarnpkg.com/pvutils/-/pvutils-1.1.3.tgz#f35fc1d27e7cd3dfbd39c0826d173e806a03f5a3" + integrity sha512-pMpnA0qRdFp32b1sJl1wOJNxZLQ2cbQx+k6tjNtZ8CpvVhNqEPRgivZ2WOUev2YMajecdH7ctUPDvEe87nariQ== + q@^1.1.2: version "1.5.1" resolved "https://registry.yarnpkg.com/q/-/q-1.5.1.tgz#7e32f75b41381291d04611f1bf14109ac00651d7" @@ -13959,16 +13802,6 @@ rc-virtual-list@^3.2.0, rc-virtual-list@^3.4.8: rc-resize-observer "^1.0.0" rc-util "^5.15.0" -rc@^1.2.8: - version "1.2.8" - resolved "https://registry.yarnpkg.com/rc/-/rc-1.2.8.tgz#cd924bf5200a075b83c188cd6b9e211b7fc0d3ed" - integrity sha512-y3bGgqKj3QBdxLbLkomlohkvsA8gdAiUQlSBJnBhfn+BPxg4bc62d8TcBW15wavDfgexCgccckhcZvywyQYPOw== - dependencies: - deep-extend "^0.6.0" - ini "~1.3.0" - minimist "^1.2.0" - strip-json-comments "~2.0.1" - react-app-polyfill@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/react-app-polyfill/-/react-app-polyfill-3.0.0.tgz#95221e0a9bd259e5ca6b177c7bb1cb6768f68fd7" @@ -14335,6 +14168,15 @@ readable-stream@^3.0.6: string_decoder "^1.1.1" util-deprecate "^1.0.1" +readable-stream@^3.4.0: + version "3.6.2" + resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-3.6.2.tgz#56a9b36ea965c00c5a93ef31eb111a0f11056967" + integrity sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA== + dependencies: + inherits "^2.0.3" + string_decoder "^1.1.1" + util-deprecate "^1.0.1" + readdirp@~3.6.0: version "3.6.0" resolved "https://registry.yarnpkg.com/readdirp/-/readdirp-3.6.0.tgz#74a370bd857116e245b29cc97340cd431a02a6c7" @@ -14440,20 +14282,6 @@ regexpu-core@^5.3.1: unicode-match-property-ecmascript "^2.0.0" unicode-match-property-value-ecmascript "^2.1.0" -registry-auth-token@^4.0.0: - version "4.2.1" - resolved "https://registry.yarnpkg.com/registry-auth-token/-/registry-auth-token-4.2.1.tgz#6d7b4006441918972ccd5fedcd41dc322c79b250" - integrity sha512-6gkSb4U6aWJB4SF2ZvLb76yCBjcvufXBqvvEx1HbmKPkutswjW1xNVRY0+daljIYRbogN7O0etYSlbiaEQyMyw== - dependencies: - rc "^1.2.8" - -registry-url@^5.0.0: - version "5.1.0" - resolved "https://registry.yarnpkg.com/registry-url/-/registry-url-5.1.0.tgz#e98334b50d5434b81136b44ec638d9c2009c5009" - integrity sha512-8acYXXTI0AkQv6RAOjE3vOaIXZkT9wo4LOFbBKYQEEnnMNBpKqdUrI6S4NT0KPIo/WVvJ5tE/X5LF/TQUf0ekw== - dependencies: - rc "^1.2.8" - regjsparser@^0.9.1: version "0.9.1" resolved "https://registry.yarnpkg.com/regjsparser/-/regjsparser-0.9.1.tgz#272d05aa10c7c1f67095b1ff0addae8442fc5709" @@ -14642,11 +14470,6 @@ repeat-string@^1.0.0, repeat-string@^1.6.1: resolved "https://registry.yarnpkg.com/repeat-string/-/repeat-string-1.6.1.tgz#8dcae470e1c88abc2d600fff4a776286da75e637" integrity sha1-jcrkcOHIirwtYA//Sndihtp15jc= -replaceall@^0.1.6: - version "0.1.6" - resolved "https://registry.yarnpkg.com/replaceall/-/replaceall-0.1.6.tgz#81d81ac7aeb72d7f5c4942adf2697a3220688d8e" - integrity sha1-gdgax663LX9cSUKt8ml6MiBojY4= - request@^2.88.2: version "2.88.2" resolved "https://registry.yarnpkg.com/request/-/request-2.88.2.tgz#d73c918731cb5a87da047e207234146f664d12b3" @@ -14759,21 +14582,6 @@ resolve@^2.0.0-next.4: path-parse "^1.0.7" supports-preserve-symlinks-flag "^1.0.0" -responselike@^1.0.2: - 
version "1.0.2" - resolved "https://registry.yarnpkg.com/responselike/-/responselike-1.0.2.tgz#918720ef3b631c5642be068f15ade5a46f4ba1e7" - integrity sha1-kYcg7ztjHFZCvgaPFa3lpG9Loec= - dependencies: - lowercase-keys "^1.0.0" - -restore-cursor@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/restore-cursor/-/restore-cursor-2.0.0.tgz#9f7ee287f82fd326d4fd162923d62129eee0dfaf" - integrity sha1-n37ih/gv0ybU/RYpI9YhKe7g368= - dependencies: - onetime "^2.0.0" - signal-exit "^3.0.2" - restore-cursor@^3.1.0: version "3.1.0" resolved "https://registry.yarnpkg.com/restore-cursor/-/restore-cursor-3.1.0.tgz#39f67c54b3a7a58cea5236d95cf0034239631f7e" @@ -14797,6 +14605,11 @@ reusify@^1.0.4: resolved "https://registry.yarnpkg.com/reusify/-/reusify-1.0.4.tgz#90da382b1e126efc02146e90845a88db12925d76" integrity sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw== +rfdc@^1.3.0: + version "1.3.0" + resolved "https://registry.yarnpkg.com/rfdc/-/rfdc-1.3.0.tgz#d0b7c441ab2720d05dc4cf26e01c89631d9da08b" + integrity sha512-V2hovdzFbOi77/WajaSMXk2OLm+xNIeQdMMuB7icj7bk6zi2F8GGAxigcnDFpJHbNyNcgyJDiP+8nOrY5cZGrA== + rimraf@^3.0.0, rimraf@^3.0.2: version "3.0.2" resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-3.0.2.tgz#f1a5402ba6220ad52cc1282bac1ae3aa49fd061a" @@ -14873,14 +14686,7 @@ run-parallel@^1.1.9: dependencies: queue-microtask "^1.2.2" -rxjs@^6.3.3, rxjs@^6.6.0: - version "6.6.7" - resolved "https://registry.yarnpkg.com/rxjs/-/rxjs-6.6.7.tgz#90ac018acabf491bf65044235d5863c4dab804c9" - integrity sha512-hTdwr+7yYNIT5n4AMYp85KA6yw2Va0FLa3Rguvbpa4W3I5xynaBZo41cM3XM+4Q6fRMj3sBYIR1VAmZMXYJvRQ== - dependencies: - tslib "^1.9.0" - -rxjs@^7.8.1: +rxjs@^7.5.5, rxjs@^7.8.1: version "7.8.1" resolved "https://registry.yarnpkg.com/rxjs/-/rxjs-7.8.1.tgz#6f6f3d99ea8044291efd92e7c7fcf562c4057543" integrity sha512-AA3TVj+0A2iuIoQkWEK/tqFjBq2j+6PO6Y0zJcvzLAFhEFIO3HL0vls9hWLncZbAAbK0mar7oZ4V079I/qPMxg== @@ -15037,11 +14843,16 @@ semver@^5.6.0: resolved "https://registry.yarnpkg.com/semver/-/semver-5.7.1.tgz#a954f931aeba508d307bbf069eff0c01c96116f7" integrity sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ== -semver@^6.0.0, semver@^6.1.1, semver@^6.1.2, semver@^6.2.0, semver@^6.3.0: +semver@^6.0.0, semver@^6.1.1, semver@^6.1.2, semver@^6.3.0: version "6.3.0" resolved "https://registry.yarnpkg.com/semver/-/semver-6.3.0.tgz#ee0a64c8af5e8ceea67687b133761e1becbd1d3d" integrity sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw== +semver@^6.3.1: + version "6.3.1" + resolved "https://registry.yarnpkg.com/semver/-/semver-6.3.1.tgz#556d2ef8689146e46dcea4bfdd095f3434dffcb4" + integrity sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA== + semver@^7.3.2, semver@^7.3.5, semver@^7.3.7, semver@^7.3.8: version "7.5.3" resolved "https://registry.yarnpkg.com/semver/-/semver-7.5.3.tgz#161ce8c2c6b4b3bdca6caadc9fa3317a4c4fe88e" @@ -15224,10 +15035,23 @@ slash@^4.0.0: resolved "https://registry.yarnpkg.com/slash/-/slash-4.0.0.tgz#2422372176c4c6c5addb5e2ada885af984b396a7" integrity sha512-3dOsAHXXUkQTpOYcoAxLIorMTp4gIQr5IW3iVb7A7lFIp0VHhnynm9izx6TssdrIcVIESAlVjtnO2K8bg+Coew== -slice-ansi@0.0.4: - version "0.0.4" - resolved "https://registry.yarnpkg.com/slice-ansi/-/slice-ansi-0.0.4.tgz#edbf8903f66f7ce2f8eafd6ceed65e264c831b35" - integrity sha1-7b+JA/ZvfOL46v1s7tZeJkyDGzU= +slice-ansi@^3.0.0: + version "3.0.0" + resolved 
"https://registry.yarnpkg.com/slice-ansi/-/slice-ansi-3.0.0.tgz#31ddc10930a1b7e0b67b08c96c2f49b77a789787" + integrity sha512-pSyv7bSTC7ig9Dcgbw9AuRNUb5k5V6oDudjZoMBSr13qpLBG7tB+zgCkARjq7xIUgdz5P1Qe8u+rSGdouOOIyQ== + dependencies: + ansi-styles "^4.0.0" + astral-regex "^2.0.0" + is-fullwidth-code-point "^3.0.0" + +slice-ansi@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/slice-ansi/-/slice-ansi-4.0.0.tgz#500e8dd0fd55b05815086255b3195adf2a45fe6b" + integrity sha512-qMCMfhY040cVHT43K9BFygqYbUPFZKHOg7K73mtTWJRb8pyP3fzf4Ixd5SzdEJQ6MRUg/WBnOLxghZtKKurENQ== + dependencies: + ansi-styles "^4.0.0" + astral-regex "^2.0.0" + is-fullwidth-code-point "^3.0.0" snake-case@^3.0.4: version "3.0.4" @@ -15332,7 +15156,7 @@ source-map-resolve@^0.6.0: atob "^2.1.2" decode-uri-component "^0.2.0" -source-map-support@^0.5.17, source-map-support@^0.5.6, source-map-support@~0.5.20: +source-map-support@^0.5.6, source-map-support@~0.5.20: version "0.5.21" resolved "https://registry.yarnpkg.com/source-map-support/-/source-map-support-0.5.21.tgz#04fe7c7f9e1ed2d662233c28cb2b35b9f63f6e4f" integrity sha512-uBHU3L3czsIyYXKX88fdrGovxdSCoTGDRZ6SYXtSRxLZUzHg5P/66Ht6uoUlHu9EZod+inXhKo3qQgwXUT/y1w== @@ -15536,6 +15360,11 @@ stream-combiner@~0.0.4: dependencies: duplexer "~0.1.1" +streamsearch@^1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/streamsearch/-/streamsearch-1.1.0.tgz#404dd1e2247ca94af554e841a8ef0eaa238da764" + integrity sha512-Mcc5wHehp9aXz1ax6bZUyY5afg9u2rv5cqQI3mRrYkGC8rW2hM02jWuwjtL++LS5qinSyhj2QfLyNsuc+VsExg== + strict-uri-encode@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/strict-uri-encode/-/strict-uri-encode-2.0.0.tgz#b9c7330c7042862f6b142dc274bbcc5866ce3546" @@ -15546,7 +15375,7 @@ string-convert@^0.2.0: resolved "https://registry.yarnpkg.com/string-convert/-/string-convert-0.2.1.tgz#6982cc3049fbb4cd85f8b24568b9d9bf39eeff97" integrity sha1-aYLMMEn7tM2F+LJFaLnZvznu/5c= -string-env-interpolation@1.0.1, string-env-interpolation@^1.0.1: +string-env-interpolation@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/string-env-interpolation/-/string-env-interpolation-1.0.1.tgz#ad4397ae4ac53fe6c91d1402ad6f6a52862c7152" integrity sha512-78lwMoCcn0nNu8LszbP1UA7g55OeE4v7rCeWnM5B453rnNr4aq+5it3FEYtZrSEiMvHZOZ9Jlqb0OD0M2VInqg== @@ -15572,23 +15401,6 @@ string-natural-compare@^3.0.1: resolved "https://registry.yarnpkg.com/string-natural-compare/-/string-natural-compare-3.0.1.tgz#7a42d58474454963759e8e8b7ae63d71c1e7fdf4" integrity sha512-n3sPwynL1nwKi3WJ6AIsClwBMa0zTi54fn2oLU6ndfTSIO05xaznjSf15PcBZU6FNWbmN5Q6cxT4V5hGvB4taw== -string-width@^1.0.1: - version "1.0.2" - resolved "https://registry.yarnpkg.com/string-width/-/string-width-1.0.2.tgz#118bdf5b8cdc51a2a7e70d211e07e2b0b9b107d3" - integrity sha1-EYvfW4zcUaKn5w0hHgfisLmxB9M= - dependencies: - code-point-at "^1.0.0" - is-fullwidth-code-point "^1.0.0" - strip-ansi "^3.0.0" - -string-width@^2.1.1: - version "2.1.1" - resolved "https://registry.yarnpkg.com/string-width/-/string-width-2.1.1.tgz#ab93f27a8dc13d28cac815c462143a6d9012ae9e" - integrity sha512-nOqH59deCq9SRHlxq1Aw85Jnt4w6KvLKqWVik6oA9ZklXLNIOlqg4F2yrT1MVaTjAqvVwdfeZ7w7aCvJD7ugkw== - dependencies: - is-fullwidth-code-point "^2.0.0" - strip-ansi "^4.0.0" - string-width@^4.1.0, string-width@^4.2.0: version "4.2.2" resolved "https://registry.yarnpkg.com/string-width/-/string-width-4.2.2.tgz#dafd4f9559a7585cfba529c6a0a4f73488ebd4c5" @@ -15598,6 +15410,15 @@ string-width@^4.1.0, string-width@^4.2.0: is-fullwidth-code-point "^3.0.0" strip-ansi "^6.0.0" 
+string-width@^4.2.3: + version "4.2.3" + resolved "https://registry.yarnpkg.com/string-width/-/string-width-4.2.3.tgz#269c7117d27b05ad2e536830a8ec895ef9c6d010" + integrity sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g== + dependencies: + emoji-regex "^8.0.0" + is-fullwidth-code-point "^3.0.0" + strip-ansi "^6.0.1" + string.prototype.matchall@^4.0.6, string.prototype.matchall@^4.0.7, string.prototype.matchall@^4.0.8: version "4.0.8" resolved "https://registry.yarnpkg.com/string.prototype.matchall/-/string.prototype.matchall-4.0.8.tgz#3bf85722021816dcd1bf38bb714915887ca79fd3" @@ -15662,20 +15483,6 @@ stringify-object@^3.3.0: is-obj "^1.0.1" is-regexp "^1.0.0" -strip-ansi@^3.0.0, strip-ansi@^3.0.1: - version "3.0.1" - resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-3.0.1.tgz#6a385fb8853d952d5ff05d0e8aaf94278dc63dcf" - integrity sha1-ajhfuIU9lS1f8F0Oiq+UJ43GPc8= - dependencies: - ansi-regex "^2.0.0" - -strip-ansi@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-4.0.0.tgz#a8479022eb1ac368a871389b635262c505ee368f" - integrity sha1-qEeQIusaw2iocTibY1JixQXuNo8= - dependencies: - ansi-regex "^3.0.0" - strip-ansi@^6.0.0, strip-ansi@^6.0.1: version "6.0.1" resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-6.0.1.tgz#9e26c63d30f53443e9489495b2105d37b67a85d9" @@ -15717,7 +15524,7 @@ strip-indent@^3.0.0: dependencies: min-indent "^1.0.0" -strip-json-comments@^2.0.1, strip-json-comments@~2.0.1: +strip-json-comments@^2.0.1: version "2.0.1" resolved "https://registry.yarnpkg.com/strip-json-comments/-/strip-json-comments-2.0.1.tgz#3c531942e908c2697c0ec344858c286c7ca0a60a" integrity sha1-PFMZQukIwml8DsNEhYwobHygpgo= @@ -15768,17 +15575,6 @@ stylis@4.1.3, stylis@^4.0.6: resolved "https://registry.yarnpkg.com/stylis/-/stylis-4.1.3.tgz#fd2fbe79f5fed17c55269e16ed8da14c84d069f7" integrity sha512-GP6WDNWf+o403jrEp9c5jibKavrtLW+/qYGhFxFrG8maXhwTBI7gLLhiBb0o7uFccWN+EOS9aMO6cGHWAO07OA== -subscriptions-transport-ws@^0.9.18: - version "0.9.18" - resolved "https://registry.yarnpkg.com/subscriptions-transport-ws/-/subscriptions-transport-ws-0.9.18.tgz#bcf02320c911fbadb054f7f928e51c6041a37b97" - integrity sha512-tztzcBTNoEbuErsVQpTN2xUNN/efAZXyCyL5m3x4t6SKrEiTL2N8SaKWBFWM4u56pL79ULif3zjyeq+oV+nOaA== - dependencies: - backo2 "^1.0.2" - eventemitter3 "^3.1.0" - iterall "^1.2.1" - symbol-observable "^1.0.4" - ws "^5.2.0" - sucrase@^3.32.0: version "3.32.0" resolved "https://registry.yarnpkg.com/sucrase/-/sucrase-3.32.0.tgz#c4a95e0f1e18b6847127258a75cf360bc568d4a7" @@ -15792,11 +15588,6 @@ sucrase@^3.32.0: pirates "^4.0.1" ts-interface-checker "^0.1.9" -supports-color@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-2.0.0.tgz#535d045ce6b6363fa40117084629995e9df324c7" - integrity sha1-U10EXOa2Nj+kARcIRimZXp3zJMc= - supports-color@^5.3.0, supports-color@^5.5.0: version "5.5.0" resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-5.5.0.tgz#e2e69a44ac8772f78a1ec0b35b689df6530efc8f" @@ -15887,11 +15678,6 @@ swap-case@^2.0.2: dependencies: tslib "^2.0.3" -symbol-observable@^1.0.4, symbol-observable@^1.1.0: - version "1.2.0" - resolved "https://registry.yarnpkg.com/symbol-observable/-/symbol-observable-1.2.0.tgz#c22688aed4eab3cdc2dfeacbb561660560a00804" - integrity sha512-e900nM8RRtGhlV36KGEU9k65K3mPb1WV70OdjfxlG2EAuM1noi/E/BaW/uMhL7bPEssK8QV57vN3esixjUvcXQ== - symbol-observable@^2.0.0: version "2.0.3" resolved 
"https://registry.yarnpkg.com/symbol-observable/-/symbol-observable-2.0.3.tgz#5b521d3d07a43c351055fa43b8355b62d33fd16a" @@ -15902,14 +15688,6 @@ symbol-tree@^3.2.4: resolved "https://registry.yarnpkg.com/symbol-tree/-/symbol-tree-3.2.4.tgz#430637d248ba77e078883951fb9aa0eed7c63fa2" integrity sha512-9QNk5KwDF+Bvz+PyObkmSYjI5ksVUYtjW7AU22r2NKcfLJcXp96hkDWU3+XndOsUb+AQ9QhfzfCT2O+CNWT5Tw== -sync-fetch@0.3.0: - version "0.3.0" - resolved "https://registry.yarnpkg.com/sync-fetch/-/sync-fetch-0.3.0.tgz#77246da949389310ad978ab26790bb05f88d1335" - integrity sha512-dJp4qg+x4JwSEW1HibAuMi0IIrBI3wuQr2GimmqB7OXR50wmwzfdusG+p39R9w3R6aFtZ2mzvxvWKQ3Bd/vx3g== - dependencies: - buffer "^5.7.0" - node-fetch "^2.6.1" - tailwindcss@^3.0.2: version "3.3.2" resolved "https://registry.yarnpkg.com/tailwindcss/-/tailwindcss-3.3.2.tgz#2f9e35d715fdf0bbf674d90147a0684d7054a2d3" @@ -16039,7 +15817,7 @@ throttle-debounce@^3.0.1: resolved "https://registry.yarnpkg.com/throttle-debounce/-/throttle-debounce-3.0.1.tgz#32f94d84dfa894f786c9a1f290e7a645b6a19abb" integrity sha512-dTEWWNu6JmeVXY0ZYoPuH5cRIwc0MeGbJwah9KUNYSJwommQpCzTySTpEe8Gs1J23aeWEuAobe4Ag7EHVt/LOg== -through@2, through@^2.3.6, through@~2.3, through@~2.3.1: +through@2, through@^2.3.6, through@^2.3.8, through@~2.3, through@~2.3.1: version "2.3.8" resolved "https://registry.npmjs.org/through/-/through-2.3.8.tgz#0dd4c9ffaabc357960b1b724115d7e0e86a2e1f5" integrity sha1-DdTJ/6q8NXlgsbckEV1+Doai4fU= @@ -16095,11 +15873,6 @@ to-object-path@^0.3.0: dependencies: kind-of "^3.0.2" -to-readable-stream@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/to-readable-stream/-/to-readable-stream-1.0.0.tgz#ce0aa0c2f3df6adf852efb404a783e77c0475771" - integrity sha512-Iq25XBt6zD5npPhlLVXGFN3/gyR2/qODcKNNyTMd4vbm39HUaOiAM4PMq0eMVC/Tkxz+Zjdsc55g9yyz+Yq00Q== - to-regex-range@^2.1.0: version "2.1.1" resolved "https://registry.yarnpkg.com/to-regex-range/-/to-regex-range-2.1.1.tgz#7c80c17b9dfebe599e27367e0d4dd5590141db38" @@ -16222,18 +15995,6 @@ ts-node@^10.7.0: v8-compile-cache-lib "^3.0.1" yn "3.1.1" -ts-node@^9: - version "9.1.1" - resolved "https://registry.yarnpkg.com/ts-node/-/ts-node-9.1.1.tgz#51a9a450a3e959401bda5f004a72d54b936d376d" - integrity sha512-hPlt7ZACERQGf03M253ytLY3dHbGNGrAq9qIHWUY9XHYl1z7wYngSr3OQ5xmui8o2AaxsONxIzjafLUiWBo1Fg== - dependencies: - arg "^4.1.0" - create-require "^1.1.0" - diff "^4.0.1" - make-error "^1.1.1" - source-map-support "^0.5.17" - yn "3.1.1" - tsconfig-paths@^3.14.1: version "3.14.2" resolved "https://registry.yarnpkg.com/tsconfig-paths/-/tsconfig-paths-3.14.2.tgz#6e32f1f79412decd261f92d633a9dc1cfa99f088" @@ -16244,26 +16005,26 @@ tsconfig-paths@^3.14.1: minimist "^1.2.6" strip-bom "^3.0.0" -tslib@^1.10.0, tslib@^1.8.1, tslib@^1.9.0: +tslib@^1.10.0, tslib@^1.8.1: version "1.14.1" resolved "https://registry.yarnpkg.com/tslib/-/tslib-1.14.1.tgz#cf2d38bdc34a134bcaf1091c41f6619e2f672d00" integrity sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg== -tslib@^2, tslib@^2.0.0, tslib@^2.0.3, tslib@^2.1.0, tslib@^2.3.0, tslib@^2.4.0: +tslib@^2.0.0, tslib@^2.0.3, tslib@^2.1.0, tslib@^2.3.0, tslib@^2.4.0: version "2.4.1" resolved "https://registry.yarnpkg.com/tslib/-/tslib-2.4.1.tgz#0d0bfbaac2880b91e22df0768e55be9753a5b17e" integrity sha512-tGyy4dAjRIEwI7BzsB0lynWgOpfqjUdq91XXAlIWD2OwKBH7oCl/GZG/HT4BOHrTlPMOASlMQ7veyTqpmRcrNA== +tslib@^2.3.1, tslib@^2.5.0, tslib@^2.6.1, tslib@^2.6.2: + version "2.6.2" + resolved 
"https://registry.yarnpkg.com/tslib/-/tslib-2.6.2.tgz#703ac29425e7b37cd6fd456e92404d46d1f3e4ae" + integrity sha512-AEYxH93jGFPn/a2iVAwW87VuUIkR1FVUKB77NwMF7nBTDkDrrT/Hpt/IrCJ0QXhW27jTBDcf5ZY7w6RiqTMw2Q== + tslib@~2.0.1: version "2.0.3" resolved "https://registry.yarnpkg.com/tslib/-/tslib-2.0.3.tgz#8e0741ac45fc0c226e58a17bfc3e64b9bc6ca61c" integrity sha512-uZtkfKblCEQtZKBF6EBXVZeQNl82yqtDQdv+eck8u7tdPxjLu2/lp5/uPW+um2tpuxINHWy3GhiccY7QgEaVHQ== -tslib@~2.1.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/tslib/-/tslib-2.1.0.tgz#da60860f1c2ecaa5703ab7d39bc05b6bf988b97a" - integrity sha512-hcVC3wYEziELGGmEEXue7D75zbwIIVUMWAVbHItGPx0ziyXxrOMQx4rQEVEV45Ut/1IotuEvwqPopzIOkDMf0A== - tslib@~2.2.0: version "2.2.0" resolved "https://registry.yarnpkg.com/tslib/-/tslib-2.2.0.tgz#fb2c475977e35e241311ede2693cee1ec6698f5c" @@ -16509,10 +16270,10 @@ universalify@^2.0.0: resolved "https://registry.yarnpkg.com/universalify/-/universalify-2.0.0.tgz#75a4984efedc4b08975c5aeb73f530d02df25717" integrity sha512-hAZsKq7Yy11Zu1DE0OzWjw7nnLZmJZYTDZZyEFHZdUhV8FkH5MCfoU1XMaxXovpyW5nq5scPqq0ZDP9Zyl04oQ== -unixify@1.0.0: +unixify@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/unixify/-/unixify-1.0.0.tgz#3a641c8c2ffbce4da683a5c70f03a462940c2090" - integrity sha1-OmQcjC/7zk2mg6XHDwOkYpQMIJA= + integrity sha512-6bc58dPYhCMHHuwxldQxO3RRNZ4eCogZ/st++0+fcC1nr0jiGUtAdBJ2qzmLQWSxbtz42pWt4QQMiZ9HvZf5cg== dependencies: normalize-path "^2.1.1" @@ -16547,6 +16308,14 @@ update-browserslist-db@^1.0.11: escalade "^3.1.1" picocolors "^1.0.0" +update-browserslist-db@^1.0.13: + version "1.0.13" + resolved "https://registry.yarnpkg.com/update-browserslist-db/-/update-browserslist-db-1.0.13.tgz#3c5e4f5c083661bd38ef64b6328c26ed6c8248c4" + integrity sha512-xebP81SNcPuNpPP3uzeW1NYXxI3rxyJzF3pD6sH4jE7o/IX+WtSpwnVU+qIsDPyk0d3hmFQ7mjqc6AtV604hbg== + dependencies: + escalade "^3.1.1" + picocolors "^1.0.0" + upper-case-first@^2.0.2: version "2.0.2" resolved "https://registry.yarnpkg.com/upper-case-first/-/upper-case-first-2.0.2.tgz#992c3273f882abd19d1e02894cc147117f844324" @@ -16573,12 +16342,15 @@ urix@^0.1.0: resolved "https://registry.yarnpkg.com/urix/-/urix-0.1.0.tgz#da937f7a62e21fec1fd18d49b35c2935067a6c72" integrity sha1-2pN/emLiH+wf0Y1Js1wpNQZ6bHI= -url-parse-lax@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/url-parse-lax/-/url-parse-lax-3.0.0.tgz#16b5cafc07dbe3676c1b1999177823d6503acb0c" - integrity sha1-FrXK/Afb42dsGxmZF3gj1lA6yww= - dependencies: - prepend-http "^2.0.0" +urlpattern-polyfill@^8.0.0: + version "8.0.2" + resolved "https://registry.yarnpkg.com/urlpattern-polyfill/-/urlpattern-polyfill-8.0.2.tgz#99f096e35eff8bf4b5a2aa7d58a1523d6ebc7ce5" + integrity sha512-Qp95D4TPJl1kC9SKigDcqgyM2VDVO4RiJc2d4qe5GrYm+zbIQCWWKAFaJNQ4BhdFeDGwBmAxqJBwWSJDb9T3BQ== + +urlpattern-polyfill@^9.0.0: + version "9.0.0" + resolved "https://registry.yarnpkg.com/urlpattern-polyfill/-/urlpattern-polyfill-9.0.0.tgz#bc7e386bb12fd7898b58d1509df21d3c29ab3460" + integrity sha512-WHN8KDQblxd32odxeIgo83rdVDE2bvdkb86it7bMhYZwWKJz0+O0RK/eZiHYnM+zgt/U7hAHOlCQGfjjvSkw2g== use-callback-ref@^1.2.5: version "1.3.0" @@ -16661,20 +16433,15 @@ v8-to-istanbul@^8.1.0: convert-source-map "^1.6.0" source-map "^0.7.3" -valid-url@1.0.9, valid-url@^1.0.9: - version "1.0.9" - resolved "https://registry.yarnpkg.com/valid-url/-/valid-url-1.0.9.tgz#1c14479b40f1397a75782f115e4086447433a200" - integrity sha1-HBRHm0DxOXp1eC8RXkCGRHQzogA= - value-equal@^1.0.1: version "1.0.1" resolved 
"https://registry.yarnpkg.com/value-equal/-/value-equal-1.0.1.tgz#1e0b794c734c5c0cade179c437d356d931a34d6c" integrity sha512-NOJ6JZCAWr0zlxZt+xqCHNTEKOsrks2HQd4MqhP1qy4z1SkbEP467eNx6TgDKXMvUOb+OENfJCZwM+16n7fRfw== -value-or-promise@1.0.6: - version "1.0.6" - resolved "https://registry.yarnpkg.com/value-or-promise/-/value-or-promise-1.0.6.tgz#218aa4794aa2ee24dcf48a29aba4413ed584747f" - integrity sha512-9r0wQsWD8z/BxPOvnwbPf05ZvFngXyouE9EKB+5GbYix+BYnAwrIChCUyFIinfbf2FL/U71z+CPpbnmTdxrwBg== +value-or-promise@^1.0.11, value-or-promise@^1.0.12: + version "1.0.12" + resolved "https://registry.yarnpkg.com/value-or-promise/-/value-or-promise-1.0.12.tgz#0e5abfeec70148c78460a849f6b003ea7986f15c" + integrity sha512-Z6Uz+TYwEqE7ZN50gwn+1LCVo9ZVrpxRPOhOLnncYkY1ZzOYtrX8Fwf/rFktZ8R5mJms6EZf5TqNOMeZmnPq9Q== vary@~1.1.2: version "1.1.2" @@ -16770,16 +16537,39 @@ wbuf@^1.1.0, wbuf@^1.7.3: dependencies: minimalistic-assert "^1.0.0" +wcwidth@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/wcwidth/-/wcwidth-1.0.1.tgz#f0b0dcf915bc5ff1528afadb2c0e17b532da2fe8" + integrity sha512-XHPEwS0q6TaxcvG85+8EYkbiCux2XtWG2mkc47Ng2A77BQu9+DqIOJldST4HgPkuea7dvKSj5VgX3P1d4rW8Tg== + dependencies: + defaults "^1.0.3" + web-namespaces@^1.0.0: version "1.1.4" resolved "https://registry.yarnpkg.com/web-namespaces/-/web-namespaces-1.1.4.tgz#bc98a3de60dadd7faefc403d1076d529f5e030ec" integrity sha512-wYxSGajtmoP4WxfejAPIr4l0fVh+jeMXZb08wNc0tMg6xsfZXj3cECqIK0G7ZAqUq0PP8WlMDtaOGVBTAWztNw== +web-streams-polyfill@^3.2.1: + version "3.2.1" + resolved "https://registry.yarnpkg.com/web-streams-polyfill/-/web-streams-polyfill-3.2.1.tgz#71c2718c52b45fd49dbeee88634b3a60ceab42a6" + integrity sha512-e0MO3wdXWKrLbL0DgGnUV7WHVuw9OUvL4hjgnPkIeEvESk74gAITi5G606JtZPp39cd8HA9VQzCIvA49LpPN5Q== + web-vitals@^0.2.4: version "0.2.4" resolved "https://registry.yarnpkg.com/web-vitals/-/web-vitals-0.2.4.tgz#ec3df43c834a207fd7cdefd732b2987896e08511" integrity sha512-6BjspCO9VriYy12z356nL6JBS0GYeEcA457YyRzD+dD6XYCQ75NKhcOHUMHentOE7OcVCIXXDvOm0jKFfQG2Gg== +webcrypto-core@^1.7.7: + version "1.7.7" + resolved "https://registry.yarnpkg.com/webcrypto-core/-/webcrypto-core-1.7.7.tgz#06f24b3498463e570fed64d7cab149e5437b162c" + integrity sha512-7FjigXNsBfopEj+5DV2nhNpfic2vumtjjgPmeDKk45z+MJwXKKfhPB7118Pfzrmh4jqOMST6Ch37iPAHoImg5g== + dependencies: + "@peculiar/asn1-schema" "^2.3.6" + "@peculiar/json-schema" "^1.1.12" + asn1js "^3.0.1" + pvtsutils "^1.3.2" + tslib "^2.4.0" + webidl-conversions@^3.0.0: version "3.0.1" resolved "https://registry.yarnpkg.com/webidl-conversions/-/webidl-conversions-3.0.1.tgz#24534275e2a7bc6be7bc86611cc16ae0a5654871" @@ -16935,7 +16725,7 @@ whatwg-encoding@^1.0.5: dependencies: iconv-lite "0.4.24" -whatwg-fetch@^3.4.1, whatwg-fetch@^3.6.2: +whatwg-fetch@^3.6.2: version "3.6.2" resolved "https://registry.yarnpkg.com/whatwg-fetch/-/whatwg-fetch-3.6.2.tgz#dced24f37f2624ed0281725d51d0e2e3fe677f8c" integrity sha512-bJlen0FcuU/0EMLrdbJ7zOnW6ITZLrZMIarMUVmdKtsGvZna8vxKYaexICWPfZ8qwf9fzNq+UEIZrnSaApt6RA== @@ -17180,15 +16970,7 @@ workbox-window@6.6.1: "@types/trusted-types" "^2.0.2" workbox-core "6.6.1" -wrap-ansi@^3.0.1: - version "3.0.1" - resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-3.0.1.tgz#288a04d87eda5c286e060dfe8f135ce8d007f8ba" - integrity sha1-KIoE2H7aXChuBg3+jxNc6NAH+Lo= - dependencies: - string-width "^2.1.1" - strip-ansi "^4.0.0" - -wrap-ansi@^6.2.0: +wrap-ansi@^6.0.1, wrap-ansi@^6.2.0: version "6.2.0" resolved 
"https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-6.2.0.tgz#e9393ba07102e6c91a3b221478f0257cd2856e53" integrity sha512-r6lPcBGxZXlIcymEu7InxDMhdW0KDxpLgoFLcguasxCaJ/SOIZwINatK9KY/tf+ZrlywOKU0UDj3ATXUBfxJXA== @@ -17221,17 +17003,10 @@ write-file-atomic@^3.0.0: signal-exit "^3.0.2" typedarray-to-buffer "^3.1.5" -ws@7.4.5: - version "7.4.5" - resolved "https://registry.yarnpkg.com/ws/-/ws-7.4.5.tgz#a484dd851e9beb6fdb420027e3885e8ce48986c1" - integrity sha512-xzyu3hFvomRfXKH8vOFMU3OguG6oOvhXMo3xsGy3xWExqaM2dxBbVxuD99O7m3ZUFMvvscsZDqxfgMaRr/Nr1g== - -ws@^5.2.0: - version "5.2.2" - resolved "https://registry.yarnpkg.com/ws/-/ws-5.2.2.tgz#dffef14866b8e8dc9133582514d1befaf96e980f" - integrity sha512-jaHFD6PFv6UgoIVda6qZllptQsMlDEJkTQcybzzXDYM1XO9Y8em691FGMPmM46WGyLU4z9KMgQN+qrux/nhlHA== - dependencies: - async-limiter "~1.0.0" +ws@8.14.2, ws@^8.12.0: + version "8.14.2" + resolved "https://registry.yarnpkg.com/ws/-/ws-8.14.2.tgz#6c249a806eb2db7a20d26d51e7709eab7b2e6c7f" + integrity sha512-wEBG1ftX4jcglPxgFCMJmZ2PLtSbJ2Peg6TmpJFTbe9GZYOQCDPdMYu/Tm0/bGZkw8paZnJY45J4K2PZrLYq8g== ws@^7.4.6: version "7.5.9" @@ -17308,6 +17083,11 @@ yaml@^2.1.1: resolved "https://registry.yarnpkg.com/yaml/-/yaml-2.3.1.tgz#02fe0975d23cd441242aa7204e09fc28ac2ac33b" integrity sha512-2eHWfjaoXgTBC2jNM1LRef62VQa0umtvRiDSk6HSzW7RvS5YtkabJrwYLLEKWBc8a5U2PTSCs+dJjUTJdlHsWQ== +yaml@^2.3.1: + version "2.3.4" + resolved "https://registry.yarnpkg.com/yaml/-/yaml-2.3.4.tgz#53fc1d514be80aabf386dc6001eb29bf3b7523b2" + integrity sha512-8aAvwVUSHpfEqTQ4w/KMlf3HcRdt50E5ODIQJBw1fQ5RL34xabzxtUlzTXVqc4rkZsPbvrXKWnABCD7kWSmocA== + yamljs@^0.3.0: version "0.3.0" resolved "https://registry.yarnpkg.com/yamljs/-/yamljs-0.3.0.tgz#dc060bf267447b39f7304e9b2bfbe8b5a7ddb03b" @@ -17329,6 +17109,11 @@ yargs-parser@^20.2.2: resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-20.2.7.tgz#61df85c113edfb5a7a4e36eb8aa60ef423cbc90a" integrity sha512-FiNkvbeHzB/syOjIUxFDCnhSfzAL8R5vs40MgLFBorXACCOAEaWu0gRZl14vG8MR9AOJIZbmkjhusqBYZ3HTHw== +yargs-parser@^21.1.1: + version "21.1.1" + resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-21.1.1.tgz#9096bceebf990d21bb31fa9516e0ede294a77d35" + integrity sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw== + yargs@^15.3.1: version "15.4.1" resolved "https://registry.yarnpkg.com/yargs/-/yargs-15.4.1.tgz#0d87a16de01aee9d8bec2bfbf74f67851730f4f8" @@ -17346,7 +17131,7 @@ yargs@^15.3.1: y18n "^4.0.0" yargs-parser "^18.1.2" -yargs@^16.1.1, yargs@^16.2.0: +yargs@^16.2.0: version "16.2.0" resolved "https://registry.yarnpkg.com/yargs/-/yargs-16.2.0.tgz#1c82bf0f6b6a66eafce7ef30e376f49a12477f66" integrity sha512-D1mvvtDG0L5ft/jGWkLpG1+m0eQxOfaBvTNELraWj22wSVUMWxZUvYgJYcKh6jGGIkJFhH4IZPQhR4TKpc8mBw== @@ -17359,6 +17144,19 @@ yargs@^16.1.1, yargs@^16.2.0: y18n "^5.0.5" yargs-parser "^20.2.2" +yargs@^17.0.0: + version "17.7.2" + resolved "https://registry.yarnpkg.com/yargs/-/yargs-17.7.2.tgz#991df39aca675a192b816e1e0363f9d75d2aa269" + integrity sha512-7dSzzRQ++CKnNI/krKnYRV7JKKPUXMEh61soaHKg9mrWEhzFWhFnxPxGl+69cD1Ou63C13NUPCnmIcrvqCuM6w== + dependencies: + cliui "^8.0.1" + escalade "^3.1.1" + get-caller-file "^2.0.5" + require-directory "^2.1.1" + string-width "^4.2.3" + y18n "^5.0.5" + yargs-parser "^21.1.1" + yjs@^13.5.23: version "13.5.44" resolved "https://registry.yarnpkg.com/yjs/-/yjs-13.5.44.tgz#1c79ec7407963e07f44174cffcfde5b58a62b0da" From c0ef72886828044c40eb9db8a140e7e8afecb2d1 Mon Sep 17 00:00:00 2001 From: Andrew Sikowitz 
<andrew.sikowitz@acryl.io> Date: Mon, 4 Dec 2023 13:21:42 -0500 Subject: [PATCH 20/23] fix(ingest/powerbi): Allow old parser to parse [db].[schema].[table] table references (#9360) --- .../ingestion/source/powerbi/config.py | 1 + .../source/powerbi/m_query/resolver.py | 28 +++++--- .../tests/unit/test_powerbi_parser.py | 65 +++++++++++++++++++ 3 files changed, 84 insertions(+), 10 deletions(-) create mode 100644 metadata-ingestion/tests/unit/test_powerbi_parser.py diff --git a/metadata-ingestion/src/datahub/ingestion/source/powerbi/config.py b/metadata-ingestion/src/datahub/ingestion/source/powerbi/config.py index b8cc34c234ffa4..f71afac737ca61 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/powerbi/config.py +++ b/metadata-ingestion/src/datahub/ingestion/source/powerbi/config.py @@ -314,6 +314,7 @@ class PowerBiDashboardSourceConfig( description="Configure how is ownership ingested", ) modified_since: Optional[str] = pydantic.Field( + default=None, description="Get only recently modified workspaces based on modified_since datetime '2023-02-10T00:00:00.0000000Z', excludePersonalWorkspaces and excludeInActiveWorkspaces limit to last 30 days", ) extract_dashboards: bool = pydantic.Field( diff --git a/metadata-ingestion/src/datahub/ingestion/source/powerbi/m_query/resolver.py b/metadata-ingestion/src/datahub/ingestion/source/powerbi/m_query/resolver.py index e200ff41f71c25..930841f1f0df2b 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/powerbi/m_query/resolver.py +++ b/metadata-ingestion/src/datahub/ingestion/source/powerbi/m_query/resolver.py @@ -617,16 +617,25 @@ def create_urn_using_old_parser( tables: List[str] = native_sql_parser.get_tables(query) - for table in tables: - schema_and_table: List[str] = table.split(".") - if len(schema_and_table) == 1: - # schema name is not present. 
set default schema - schema_and_table.insert(0, MSSqlDataPlatformTableCreator.DEFAULT_SCHEMA) - - qualified_table_name = ( - f"{db_name}.{schema_and_table[0]}.{schema_and_table[1]}" - ) + for parsed_table in tables: + # components: List[str] = [v.strip("[]") for v in parsed_table.split(".")] + components = [v.strip("[]") for v in parsed_table.split(".")] + if len(components) == 3: + database, schema, table = components + elif len(components) == 2: + schema, table = components + database = db_name + elif len(components) == 1: + (table,) = components + database = db_name + schema = MSSqlDataPlatformTableCreator.DEFAULT_SCHEMA + else: + logger.warning( + f"Unsupported table format found {parsed_table} in query {query}" + ) + continue + qualified_table_name = f"{database}.{schema}.{table}" urn = urn_creator( config=self.config, platform_instance_resolver=self.platform_instance_resolver, @@ -634,7 +643,6 @@ def create_urn_using_old_parser( server=server, qualified_table_name=qualified_table_name, ) - dataplatform_tables.append( DataPlatformTable( data_platform_pair=self.get_platform_pair(), diff --git a/metadata-ingestion/tests/unit/test_powerbi_parser.py b/metadata-ingestion/tests/unit/test_powerbi_parser.py new file mode 100644 index 00000000000000..e53e8d7aee16fa --- /dev/null +++ b/metadata-ingestion/tests/unit/test_powerbi_parser.py @@ -0,0 +1,65 @@ +import pytest + +from datahub.ingestion.api.common import PipelineContext +from datahub.ingestion.source.powerbi.config import PowerBiDashboardSourceConfig +from datahub.ingestion.source.powerbi.dataplatform_instance_resolver import ( + ResolvePlatformInstanceFromDatasetTypeMapping, +) +from datahub.ingestion.source.powerbi.m_query.resolver import ( + MSSqlDataPlatformTableCreator, +) + + +@pytest.fixture +def creator(): + config = PowerBiDashboardSourceConfig( + tenant_id="test-tenant-id", + client_id="test-client-id", + client_secret="test-client-secret", + ) + return MSSqlDataPlatformTableCreator( + ctx=PipelineContext(run_id="test-run-id"), + config=config, + platform_instance_resolver=ResolvePlatformInstanceFromDatasetTypeMapping( + config + ), + ) + + +def test_parse_three_part_table_reference(creator): + v = creator.create_urn_using_old_parser( + "SELECT * FROM [dwhdbt].[dbo2].[my_table] where oper_day_date > getdate() - 5", + db_name="default_db", + server="server", + ) + assert len(v) == 1 + assert ( + v[0].urn + == "urn:li:dataset:(urn:li:dataPlatform:mssql,dwhdbt.dbo2.my_table,PROD)" + ) + + +def test_parse_two_part_table_reference(creator): + v = creator.create_urn_using_old_parser( + "SELECT * FROM my_schema.my_table", + db_name="default_db", + server="server", + ) + assert len(v) == 1 + assert ( + v[0].urn + == "urn:li:dataset:(urn:li:dataPlatform:mssql,default_db.my_schema.my_table,PROD)" + ) + + +def test_parse_one_part_table_reference(creator): + v = creator.create_urn_using_old_parser( + "SELECT * FROM my_table", + db_name="default_db", + server="server", + ) + assert len(v) == 1 + assert ( + v[0].urn + == "urn:li:dataset:(urn:li:dataPlatform:mssql,default_db.dbo.my_table,PROD)" + ) From 4ec3208918791b517a6d18c41905ee2dbe189a12 Mon Sep 17 00:00:00 2001 From: Harshal Sheth <hsheth2@gmail.com> Date: Mon, 4 Dec 2023 14:31:58 -0500 Subject: [PATCH 21/23] feat(ingest): support stdin in `datahub put` (#9359) --- .../src/datahub/cli/ingest_cli.py | 4 +++ metadata-ingestion/src/datahub/cli/put_cli.py | 27 ++++++++++--------- .../src/datahub/cli/specific/file_loader.py | 1 + .../datahub/configuration/config_loader.py | 22 
++++++++++----- .../src/datahub/configuration/json_loader.py | 11 ++++++++ .../source/metadata/business_glossary.py | 2 +- .../ingestion/source/metadata/lineage.py | 2 +- .../tests/unit/config/test_config_loader.py | 9 +++++-- 8 files changed, 55 insertions(+), 23 deletions(-) create mode 100644 metadata-ingestion/src/datahub/configuration/json_loader.py diff --git a/metadata-ingestion/src/datahub/cli/ingest_cli.py b/metadata-ingestion/src/datahub/cli/ingest_cli.py index dd0287004a3686..b7827ec9f050b4 100644 --- a/metadata-ingestion/src/datahub/cli/ingest_cli.py +++ b/metadata-ingestion/src/datahub/cli/ingest_cli.py @@ -147,6 +147,9 @@ async def run_pipeline_to_completion(pipeline: Pipeline) -> int: squirrel_original_config=True, squirrel_field="__raw_config", allow_stdin=True, + allow_remote=True, + process_directives=True, + resolve_env_vars=True, ) raw_pipeline_config = pipeline_config.pop("__raw_config") @@ -268,6 +271,7 @@ def deploy( pipeline_config = load_config_file( config, allow_stdin=True, + allow_remote=True, resolve_env_vars=False, ) diff --git a/metadata-ingestion/src/datahub/cli/put_cli.py b/metadata-ingestion/src/datahub/cli/put_cli.py index 6a1d82388dc2a1..324d7f94db258e 100644 --- a/metadata-ingestion/src/datahub/cli/put_cli.py +++ b/metadata-ingestion/src/datahub/cli/put_cli.py @@ -1,11 +1,11 @@ -import json import logging -from typing import Any, Optional +from typing import Optional import click from click_default_group import DefaultGroup from datahub.cli.cli_utils import post_entity +from datahub.configuration.config_loader import load_config_file from datahub.emitter.mcp import MetadataChangeProposalWrapper from datahub.ingestion.graph.client import get_default_graph from datahub.metadata.schema_classes import ( @@ -36,22 +36,23 @@ def put() -> None: @click.option("--urn", required=True, type=str) @click.option("-a", "--aspect", required=True, type=str) @click.option("-d", "--aspect-data", required=True, type=str) -@click.pass_context @upgrade.check_upgrade @telemetry.with_telemetry() -def aspect(ctx: Any, urn: str, aspect: str, aspect_data: str) -> None: +def aspect(urn: str, aspect: str, aspect_data: str) -> None: """Update a single aspect of an entity""" entity_type = guess_entity_type(urn) - with open(aspect_data) as fp: - aspect_obj = json.load(fp) - status = post_entity( - urn=urn, - aspect_name=aspect, - entity_type=entity_type, - aspect_value=aspect_obj, - ) - click.secho(f"Update succeeded with status {status}", fg="green") + aspect_obj = load_config_file( + aspect_data, allow_stdin=True, resolve_env_vars=False, process_directives=False + ) + + status = post_entity( + urn=urn, + aspect_name=aspect, + entity_type=entity_type, + aspect_value=aspect_obj, + ) + click.secho(f"Update succeeded with status {status}", fg="green") @put.command() diff --git a/metadata-ingestion/src/datahub/cli/specific/file_loader.py b/metadata-ingestion/src/datahub/cli/specific/file_loader.py index a9787343fdb911..cad32eb0a22a18 100644 --- a/metadata-ingestion/src/datahub/cli/specific/file_loader.py +++ b/metadata-ingestion/src/datahub/cli/specific/file_loader.py @@ -21,5 +21,6 @@ def load_file(config_file: Path) -> Union[dict, list]: squirrel_original_config=False, resolve_env_vars=False, allow_stdin=False, + process_directives=False, ) return res diff --git a/metadata-ingestion/src/datahub/configuration/config_loader.py b/metadata-ingestion/src/datahub/configuration/config_loader.py index 30ca4ff6aed2d1..2f41af6f7286e6 100644 --- 
a/metadata-ingestion/src/datahub/configuration/config_loader.py +++ b/metadata-ingestion/src/datahub/configuration/config_loader.py @@ -11,6 +11,7 @@ from expandvars import UnboundVariable, expandvars from datahub.configuration.common import ConfigurationError, ConfigurationMechanism +from datahub.configuration.json_loader import JsonConfigurationMechanism from datahub.configuration.toml import TomlConfigurationMechanism from datahub.configuration.yaml import YamlConfigurationMechanism @@ -100,33 +101,42 @@ def load_config_file( squirrel_original_config: bool = False, squirrel_field: str = "__orig_config", allow_stdin: bool = False, - resolve_env_vars: bool = True, - process_directives: bool = True, + allow_remote: bool = True, # TODO: Change the default to False. + resolve_env_vars: bool = True, # TODO: Change the default to False. + process_directives: bool = False, ) -> dict: config_mech: ConfigurationMechanism if allow_stdin and config_file == "-": # If we're reading from stdin, we assume that the input is a YAML file. + # Note that YAML is a superset of JSON, so this will also read JSON files. config_mech = YamlConfigurationMechanism() raw_config_file = sys.stdin.read() else: config_file_path = pathlib.Path(config_file) if config_file_path.suffix in {".yaml", ".yml"}: config_mech = YamlConfigurationMechanism() + elif config_file_path.suffix == ".json": + config_mech = JsonConfigurationMechanism() elif config_file_path.suffix == ".toml": config_mech = TomlConfigurationMechanism() else: raise ConfigurationError( - f"Only .toml and .yml are supported. Cannot process file type {config_file_path.suffix}" + f"Only .toml, .yml, and .json are supported. Cannot process file type {config_file_path.suffix}" ) + url_parsed = parse.urlparse(str(config_file)) - if url_parsed.scheme in ("http", "https"): # URLs will return http/https + if allow_remote and url_parsed.scheme in ( + "http", + "https", + ): # URLs will return http/https + # If the URL is remote, we need to fetch it. 
try: response = requests.get(str(config_file)) raw_config_file = response.text except Exception as e: raise ConfigurationError( - f"Cannot read remote file {config_file_path}, error:{e}" - ) + f"Cannot read remote file {config_file_path}: {e}" + ) from e else: if not config_file_path.is_file(): raise ConfigurationError(f"Cannot open config file {config_file_path}") diff --git a/metadata-ingestion/src/datahub/configuration/json_loader.py b/metadata-ingestion/src/datahub/configuration/json_loader.py new file mode 100644 index 00000000000000..35667eb5951fc7 --- /dev/null +++ b/metadata-ingestion/src/datahub/configuration/json_loader.py @@ -0,0 +1,11 @@ +import json +from typing import IO + +from datahub.configuration import ConfigurationMechanism + + +class JsonConfigurationMechanism(ConfigurationMechanism): + """Ability to load configuration from json files""" + + def load_config(self, config_fp: IO) -> dict: + return json.load(config_fp) diff --git a/metadata-ingestion/src/datahub/ingestion/source/metadata/business_glossary.py b/metadata-ingestion/src/datahub/ingestion/source/metadata/business_glossary.py index 97877df63707f5..6baa70aa581d62 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/metadata/business_glossary.py +++ b/metadata-ingestion/src/datahub/ingestion/source/metadata/business_glossary.py @@ -495,7 +495,7 @@ def create(cls, config_dict, ctx): def load_glossary_config( cls, file_name: Union[str, pathlib.Path] ) -> BusinessGlossaryConfig: - config = load_config_file(file_name) + config = load_config_file(file_name, resolve_env_vars=True) glossary_cfg = BusinessGlossaryConfig.parse_obj(config) return glossary_cfg diff --git a/metadata-ingestion/src/datahub/ingestion/source/metadata/lineage.py b/metadata-ingestion/src/datahub/ingestion/source/metadata/lineage.py index f33c6e0edae3dc..659444fe610e03 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/metadata/lineage.py +++ b/metadata-ingestion/src/datahub/ingestion/source/metadata/lineage.py @@ -147,7 +147,7 @@ def create( @staticmethod def load_lineage_config(file_name: str) -> LineageConfig: - config = load_config_file(file_name) + config = load_config_file(file_name, resolve_env_vars=True) lineage_config = LineageConfig.parse_obj(config) return lineage_config diff --git a/metadata-ingestion/tests/unit/config/test_config_loader.py b/metadata-ingestion/tests/unit/config/test_config_loader.py index 3253c96b876aa9..f9a4076e18363d 100644 --- a/metadata-ingestion/tests/unit/config/test_config_loader.py +++ b/metadata-ingestion/tests/unit/config/test_config_loader.py @@ -134,7 +134,7 @@ def test_load_success(pytestconfig, filename, golden_config, env, referenced_env assert list_referenced_env_variables(raw_config) == referenced_env_vars with mock.patch.dict(os.environ, env): - loaded_config = load_config_file(filepath) + loaded_config = load_config_file(filepath, resolve_env_vars=True) assert loaded_config == golden_config # TODO check referenced env vars @@ -183,7 +183,12 @@ def test_write_file_directive(pytestconfig): fake_ssl_key = "my-secret-key-value" with mock.patch.dict(os.environ, {"DATAHUB_SSL_KEY": fake_ssl_key}): - loaded_config = load_config_file(filepath, squirrel_original_config=False) + loaded_config = load_config_file( + filepath, + squirrel_original_config=False, + resolve_env_vars=True, + process_directives=True, + ) # Check that the rest of the dict is unmodified. 
diff = deepdiff.DeepDiff( From 7517c77ffdbafc193dc7529881fc42ebe3f2ab2a Mon Sep 17 00:00:00 2001 From: Harshal Sheth <hsheth2@gmail.com> Date: Mon, 4 Dec 2023 20:00:11 -0500 Subject: [PATCH 22/23] fix(ingest): resolve issue with caplog and asyncio (#9377) --- .../src/datahub/ingestion/source/looker/lookml_source.py | 2 +- .../tests/unit/api/source_helpers/test_source_helpers.py | 9 +++++++-- .../tests/unit/utilities/test_perf_timer.py | 2 +- 3 files changed, 9 insertions(+), 4 deletions(-) diff --git a/metadata-ingestion/src/datahub/ingestion/source/looker/lookml_source.py b/metadata-ingestion/src/datahub/ingestion/source/looker/lookml_source.py index 4e91d17feaa9f0..93c405f0a39f2f 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/looker/lookml_source.py +++ b/metadata-ingestion/src/datahub/ingestion/source/looker/lookml_source.py @@ -550,7 +550,7 @@ def resolve_includes( @dataclass class LookerViewFile: absolute_file_path: str - connection: Optional[str] + connection: Optional[LookerConnectionDefinition] includes: List[str] resolved_includes: List[ProjectInclude] views: List[Dict] diff --git a/metadata-ingestion/tests/unit/api/source_helpers/test_source_helpers.py b/metadata-ingestion/tests/unit/api/source_helpers/test_source_helpers.py index b667af8bb41e98..26e8639bed6e7f 100644 --- a/metadata-ingestion/tests/unit/api/source_helpers/test_source_helpers.py +++ b/metadata-ingestion/tests/unit/api/source_helpers/test_source_helpers.py @@ -3,6 +3,7 @@ from typing import Any, Dict, Iterable, List, Union from unittest.mock import patch +import pytest from freezegun import freeze_time import datahub.metadata.schema_classes as models @@ -482,7 +483,7 @@ def test_auto_browse_path_v2_dry_run(telemetry_ping_mock): @freeze_time("2023-01-02 00:00:00") -def test_auto_empty_dataset_usage_statistics(caplog): +def test_auto_empty_dataset_usage_statistics(caplog: pytest.LogCaptureFixture) -> None: has_urn = make_dataset_urn("my_platform", "has_aspect") empty_urn = make_dataset_urn("my_platform", "no_aspect") config = BaseTimeWindowConfig() @@ -499,6 +500,7 @@ def test_auto_empty_dataset_usage_statistics(caplog): ), ).as_workunit() ] + caplog.clear() with caplog.at_level(logging.WARNING): new_wus = list( auto_empty_dataset_usage_statistics( @@ -530,7 +532,9 @@ def test_auto_empty_dataset_usage_statistics(caplog): @freeze_time("2023-01-02 00:00:00") -def test_auto_empty_dataset_usage_statistics_invalid_timestamp(caplog): +def test_auto_empty_dataset_usage_statistics_invalid_timestamp( + caplog: pytest.LogCaptureFixture, +) -> None: urn = make_dataset_urn("my_platform", "my_dataset") config = BaseTimeWindowConfig() wus = [ @@ -546,6 +550,7 @@ def test_auto_empty_dataset_usage_statistics_invalid_timestamp(caplog): ), ).as_workunit() ] + caplog.clear() with caplog.at_level(logging.WARNING): new_wus = list( auto_empty_dataset_usage_statistics( diff --git a/metadata-ingestion/tests/unit/utilities/test_perf_timer.py b/metadata-ingestion/tests/unit/utilities/test_perf_timer.py index d5fde314c2b57a..6129b3e37d8bc3 100644 --- a/metadata-ingestion/tests/unit/utilities/test_perf_timer.py +++ b/metadata-ingestion/tests/unit/utilities/test_perf_timer.py @@ -5,7 +5,7 @@ from datahub.utilities.perf_timer import PerfTimer -approx = partial(pytest.approx, rel=1e-2) +approx = partial(pytest.approx, rel=2e-2) def test_perf_timer_simple(): From 0d9aa2641014f36611e0d740dcd0df563df0984d Mon Sep 17 00:00:00 2001 From: Harshal Sheth <hsheth2@gmail.com> Date: Mon, 4 Dec 2023 20:00:57 -0500 Subject: [PATCH 23/23] 
fix(ingest/airflow): compat with pluggy 1.0 (#9365) --- docs/lineage/airflow.md | 14 ++++++++ .../datahub_airflow_plugin/_airflow_shims.py | 5 +++ .../_datahub_listener_module.py | 35 ++++++++++++++++--- .../datahub_airflow_plugin/datahub_plugin.py | 6 ++-- .../airflow-plugin/tox.ini | 6 +++- 5 files changed, 57 insertions(+), 9 deletions(-) diff --git a/docs/lineage/airflow.md b/docs/lineage/airflow.md index 8fd38f560bfbb5..da3a36bc87be53 100644 --- a/docs/lineage/airflow.md +++ b/docs/lineage/airflow.md @@ -246,6 +246,20 @@ If your URLs aren't being generated correctly (usually they'll start with `http: base_url = http://airflow.mycorp.example.com ``` +### TypeError ... missing 3 required positional arguments + +If you see errors like the following with the v2 plugin: + +```shell +ERROR - on_task_instance_success() missing 3 required positional arguments: 'previous_state', 'task_instance', and 'session' +Traceback (most recent call last): + File "/home/airflow/.local/lib/python3.8/site-packages/datahub_airflow_plugin/datahub_listener.py", line 124, in wrapper + f(*args, **kwargs) +TypeError: on_task_instance_success() missing 3 required positional arguments: 'previous_state', 'task_instance', and 'session' +``` + +The solution is to upgrade `acryl-datahub-airflow-plugin>=0.12.0.4` or upgrade `pluggy>=1.2.0`. See this [PR](https://github.com/datahub-project/datahub/pull/9365) for details. + ## Compatibility We no longer officially support Airflow <2.1. However, you can use older versions of `acryl-datahub-airflow-plugin` with older versions of Airflow. diff --git a/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/_airflow_shims.py b/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/_airflow_shims.py index 10f014fbd586f5..d384958cf3ddb5 100644 --- a/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/_airflow_shims.py +++ b/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/_airflow_shims.py @@ -2,6 +2,7 @@ import airflow.version import packaging.version +import pluggy from airflow.models.baseoperator import BaseOperator from datahub_airflow_plugin._airflow_compat import AIRFLOW_PATCHED @@ -27,9 +28,13 @@ # Approach suggested by https://stackoverflow.com/a/11887885/5004662. 
AIRFLOW_VERSION = packaging.version.parse(airflow.version.version) +PLUGGY_VERSION = packaging.version.parse(pluggy.__version__) HAS_AIRFLOW_STANDALONE_CMD = AIRFLOW_VERSION >= packaging.version.parse("2.2.0.dev0") HAS_AIRFLOW_LISTENER_API = AIRFLOW_VERSION >= packaging.version.parse("2.3.0.dev0") HAS_AIRFLOW_DAG_LISTENER_API = AIRFLOW_VERSION >= packaging.version.parse("2.5.0.dev0") +NEEDS_AIRFLOW_LISTENER_MODULE = AIRFLOW_VERSION < packaging.version.parse( + "2.5.0.dev0" +) or PLUGGY_VERSION <= packaging.version.parse("1.0.0") def get_task_inlets(operator: "Operator") -> List: diff --git a/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/_datahub_listener_module.py b/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/_datahub_listener_module.py index f39d37b1222285..e16563400e397f 100644 --- a/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/_datahub_listener_module.py +++ b/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/_datahub_listener_module.py @@ -1,7 +1,34 @@ -from datahub_airflow_plugin.datahub_listener import get_airflow_plugin_listener +from datahub_airflow_plugin.datahub_listener import ( + get_airflow_plugin_listener, + hookimpl, +) _listener = get_airflow_plugin_listener() if _listener: - on_task_instance_running = _listener.on_task_instance_running - on_task_instance_success = _listener.on_task_instance_success - on_task_instance_failed = _listener.on_task_instance_failed + # The run_in_thread decorator messes with pluggy's interface discovery, + # which causes the hooks to be called with no arguments and results in TypeErrors. + # This is only an issue with Pluggy <= 1.0.0. + # See https://github.com/pytest-dev/pluggy/issues/358 + # Note that pluggy 1.0.0 is in the constraints file for Airflow 2.4 and 2.5. 
+ + @hookimpl + def on_task_instance_running(previous_state, task_instance, session): + assert _listener + _listener.on_task_instance_running(previous_state, task_instance, session) + + @hookimpl + def on_task_instance_success(previous_state, task_instance, session): + assert _listener + _listener.on_task_instance_success(previous_state, task_instance, session) + + @hookimpl + def on_task_instance_failed(previous_state, task_instance, session): + assert _listener + _listener.on_task_instance_failed(previous_state, task_instance, session) + + if hasattr(_listener, "on_dag_run_running"): + + @hookimpl + def on_dag_run_running(dag_run, session): + assert _listener + _listener.on_dag_run_running(dag_run, session) diff --git a/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/datahub_plugin.py b/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/datahub_plugin.py index c96fab31647f50..2b0b751bd787b7 100644 --- a/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/datahub_plugin.py +++ b/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/datahub_plugin.py @@ -6,8 +6,8 @@ from datahub_airflow_plugin._airflow_compat import AIRFLOW_PATCHED from datahub_airflow_plugin._airflow_shims import ( - HAS_AIRFLOW_DAG_LISTENER_API, HAS_AIRFLOW_LISTENER_API, + NEEDS_AIRFLOW_LISTENER_MODULE, ) assert AIRFLOW_PATCHED @@ -50,7 +50,7 @@ class DatahubPlugin(AirflowPlugin): name = "datahub_plugin" if _USE_AIRFLOW_LISTENER_INTERFACE: - if HAS_AIRFLOW_DAG_LISTENER_API: + if not NEEDS_AIRFLOW_LISTENER_MODULE: from datahub_airflow_plugin.datahub_listener import ( # type: ignore[misc] get_airflow_plugin_listener, ) @@ -60,8 +60,6 @@ class DatahubPlugin(AirflowPlugin): else: # On Airflow < 2.5, we need the listener to be a module. # This is just a quick shim layer to make that work. - # The DAG listener API was added at the same time as this method - # was fixed, so we're reusing the same check variable. # # Related Airflow change: https://github.com/apache/airflow/pull/27113. import datahub_airflow_plugin._datahub_listener_module as _listener_module # type: ignore[misc] diff --git a/metadata-ingestion-modules/airflow-plugin/tox.ini b/metadata-ingestion-modules/airflow-plugin/tox.ini index 2f05854940d104..1010bd2933e452 100644 --- a/metadata-ingestion-modules/airflow-plugin/tox.ini +++ b/metadata-ingestion-modules/airflow-plugin/tox.ini @@ -14,7 +14,11 @@ deps = # Airflow version airflow21: apache-airflow~=2.1.0 airflow22: apache-airflow~=2.2.0 - airflow24: apache-airflow~=2.4.0 + # On Airflow 2.4 and 2.5, Airflow's constraints file pins pluggy to 1.0.0, + # which has caused issues for us before. As such, we now pin it explicitly + # to prevent regressions. + # See https://github.com/datahub-project/datahub/pull/9365 + airflow24: apache-airflow~=2.4.0,pluggy==1.0.0 airflow26: apache-airflow~=2.6.0 airflow27: apache-airflow~=2.7.0 commands =
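
---

For readers unfamiliar with how pluggy maps call arguments onto listener hooks, the shim in `_datahub_listener_module.py` boils down to exposing hook functions whose signatures spell out the expected parameters, so that older pluggy releases can line the arguments up before handing off to the real listener. The sketch below is a minimal, self-contained illustration of that pattern under invented names: `ListenerSpec`, `_Listener`, and `ShimModule` are stand-ins for illustration only, not Airflow's or DataHub's actual classes; only the hook name mirrors the one used in the patch.

```python
# Minimal sketch (hypothetical names) of the "thin shim with explicit
# parameter names" pattern used to stay compatible with pluggy <= 1.0.0.
import pluggy

hookspec = pluggy.HookspecMarker("listener_demo")
hookimpl = pluggy.HookimplMarker("listener_demo")


class ListenerSpec:
    """Stand-in for Airflow's task-instance listener spec."""

    @hookspec
    def on_task_instance_success(self, previous_state, task_instance, session):
        """Called when a task instance succeeds."""


class _Listener:
    """Stand-in for the real listener object that does the heavy lifting."""

    def on_task_instance_success(self, previous_state, task_instance, session):
        print("forwarded:", previous_state, task_instance, session)


_listener = _Listener()


class ShimModule:
    """Hook implementations with explicit parameter names, so pluggy can
    discover the expected arguments and pass them through to the listener."""

    @hookimpl
    def on_task_instance_success(self, previous_state, task_instance, session):
        _listener.on_task_instance_success(previous_state, task_instance, session)


pm = pluggy.PluginManager("listener_demo")
pm.add_hookspecs(ListenerSpec)
pm.register(ShimModule())
pm.hook.on_task_instance_success(
    previous_state="RUNNING", task_instance="<task instance>", session="<session>"
)
```

Running the sketch prints the forwarded arguments. Conversely, if the explicit signature is hidden behind a bare `*args, **kwargs` wrapper (as a `run_in_thread`-style decorator does), pluggy <= 1.0.0 detects no argument names, invokes the hook with nothing, and the wrapper's inner call fails; that is roughly the `missing 3 required positional arguments` failure described in the new troubleshooting section of `docs/lineage/airflow.md`.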