From ed1d35c79bb41d06ce646ef44bf7ad810fd229b6 Mon Sep 17 00:00:00 2001 From: Harshal Sheth Date: Tue, 26 Sep 2023 00:10:49 -0400 Subject: [PATCH 1/9] fix(build): run codegen when building datahub-ingestion image (#8869) --- .github/workflows/docker-unified.yml | 14 +++++++------- docker/datahub-ingestion-base/Dockerfile | 4 ++-- docker/datahub-ingestion/Dockerfile | 2 +- docker/datahub-ingestion/Dockerfile-slim-only | 2 +- 4 files changed, 11 insertions(+), 11 deletions(-) diff --git a/.github/workflows/docker-unified.yml b/.github/workflows/docker-unified.yml index 13c921e953c324..de3e0ca93e6b7e 100644 --- a/.github/workflows/docker-unified.yml +++ b/.github/workflows/docker-unified.yml @@ -58,7 +58,7 @@ jobs: echo "full_tag=$(get_tag)-full" >> $GITHUB_OUTPUT echo "unique_tag=$(get_unique_tag)" >> $GITHUB_OUTPUT echo "unique_slim_tag=$(get_unique_tag)-slim" >> $GITHUB_OUTPUT - echo "unique_full_tag=$(get_unique_tag)-full" >> $GITHUB_OUTPUT + echo "unique_full_tag=$(get_unique_tag)" >> $GITHUB_OUTPUT echo "python_release_version=$(get_python_docker_release_v)" >> $GITHUB_OUTPUT - name: Check whether publishing enabled id: publish @@ -501,7 +501,7 @@ jobs: platforms: linux/amd64,linux/arm64/v8 - name: Compute DataHub Ingestion (Base-Slim) Tag id: tag - run: echo "tag=${{ steps.filter.outputs.datahub-ingestion-base == 'true' && needs.setup.outputs.unique_slim_tag || 'head' }}" >> $GITHUB_OUTPUT + run: echo "tag=${{ steps.filter.outputs.datahub-ingestion-base == 'true' && needs.setup.outputs.unique_slim_tag || 'head-slim' }}" >> $GITHUB_OUTPUT datahub_ingestion_base_full_build: name: Build and Push DataHub Ingestion (Base-Full) Docker Image runs-on: ubuntu-latest @@ -567,13 +567,13 @@ jobs: datahub-ingestion: - 'docker/datahub-ingestion/**' - name: Build codegen - if: ${{ steps.filter.outputs.datahub-ingestion-base == 'true' || steps.filter.outputs.datahub-ingestion == 'true' }} + if: ${{ steps.filter.outputs.datahub-ingestion-base == 'true' || steps.filter.outputs.datahub-ingestion == 'true' || needs.setup.outputs.publish }} run: ./gradlew :metadata-ingestion:codegen - name: Download Base Image uses: ishworkh/docker-image-artifact-download@v1 if: ${{ needs.setup.outputs.publish != 'true' && steps.filter.outputs.datahub-ingestion-base == 'true' }} with: - image: ${{ env.DATAHUB_INGESTION_BASE_IMAGE }}:${{ steps.filter.outputs.datahub-ingestion-base == 'true' && needs.setup.outputs.unique_slim_tag || 'head' }} + image: ${{ env.DATAHUB_INGESTION_BASE_IMAGE }}:${{ steps.filter.outputs.datahub-ingestion-base == 'true' && needs.setup.outputs.unique_slim_tag || 'head-slim' }} - name: Build and push Slim Image if: ${{ steps.filter.outputs.datahub-ingestion-base == 'true' || steps.filter.outputs.datahub-ingestion == 'true' || needs.setup.outputs.publish }} uses: ./.github/actions/docker-custom-build-and-push @@ -583,7 +583,7 @@ jobs: ${{ env.DATAHUB_INGESTION_IMAGE }} build-args: | BASE_IMAGE=${{ env.DATAHUB_INGESTION_BASE_IMAGE }} - DOCKER_VERSION=${{ steps.filter.outputs.datahub-ingestion-base == 'true' && needs.setup.outputs.unique_slim_tag || 'head' }} + DOCKER_VERSION=${{ steps.filter.outputs.datahub-ingestion-base == 'true' && needs.setup.outputs.unique_slim_tag || 'head-slim' }} RELEASE_VERSION=${{ needs.setup.outputs.python_release_version }} APP_ENV=slim tags: ${{ needs.setup.outputs.slim_tag }} @@ -595,7 +595,7 @@ jobs: platforms: linux/amd64,linux/arm64/v8 - name: Compute Tag id: tag - run: echo "tag=${{ (steps.filter.outputs.datahub-ingestion-base == 'true' || 
steps.filter.outputs.datahub-ingestion == 'true') && needs.setup.outputs.unique_slim_tag || 'head' }}" >> $GITHUB_OUTPUT + run: echo "tag=${{ (steps.filter.outputs.datahub-ingestion-base == 'true' || steps.filter.outputs.datahub-ingestion == 'true') && needs.setup.outputs.unique_slim_tag || 'head-slim' }}" >> $GITHUB_OUTPUT datahub_ingestion_slim_scan: permissions: contents: read # for actions/checkout to fetch code @@ -650,7 +650,7 @@ jobs: datahub-ingestion: - 'docker/datahub-ingestion/**' - name: Build codegen - if: ${{ steps.filter.outputs.datahub-ingestion-base == 'true' || steps.filter.outputs.datahub-ingestion == 'true' }} + if: ${{ steps.filter.outputs.datahub-ingestion-base == 'true' || steps.filter.outputs.datahub-ingestion == 'true' || needs.setup.outputs.publish }} run: ./gradlew :metadata-ingestion:codegen - name: Download Base Image uses: ishworkh/docker-image-artifact-download@v1 diff --git a/docker/datahub-ingestion-base/Dockerfile b/docker/datahub-ingestion-base/Dockerfile index 3d47f796173704..564cc19cc9a5f9 100644 --- a/docker/datahub-ingestion-base/Dockerfile +++ b/docker/datahub-ingestion-base/Dockerfile @@ -1,7 +1,7 @@ ARG APP_ENV=full ARG BASE_IMAGE=base -FROM golang:1-alpine3.17 AS binary +FROM golang:1-alpine3.17 AS dockerize-binary ENV DOCKERIZE_VERSION v0.6.1 WORKDIR /go/src/github.com/jwilder @@ -41,7 +41,7 @@ RUN apt-get update && apt-get install -y -qq \ && rm -rf /var/lib/apt/lists/* /var/cache/apk/* # compiled against newer golang for security fixes -COPY --from=binary /go/bin/dockerize /usr/local/bin +COPY --from=dockerize-binary /go/bin/dockerize /usr/local/bin COPY ./docker/datahub-ingestion-base/base-requirements.txt requirements.txt COPY ./docker/datahub-ingestion-base/entrypoint.sh /entrypoint.sh diff --git a/docker/datahub-ingestion/Dockerfile b/docker/datahub-ingestion/Dockerfile index 8b726df5e88420..0132ceaa9b1a95 100644 --- a/docker/datahub-ingestion/Dockerfile +++ b/docker/datahub-ingestion/Dockerfile @@ -1,7 +1,7 @@ # Defining environment ARG APP_ENV=full ARG BASE_IMAGE=acryldata/datahub-ingestion-base -ARG DOCKER_VERSION=latest +ARG DOCKER_VERSION=head FROM $BASE_IMAGE:$DOCKER_VERSION as base USER 0 diff --git a/docker/datahub-ingestion/Dockerfile-slim-only b/docker/datahub-ingestion/Dockerfile-slim-only index 9ae116f839aa07..cb8c27ab463c48 100644 --- a/docker/datahub-ingestion/Dockerfile-slim-only +++ b/docker/datahub-ingestion/Dockerfile-slim-only @@ -1,6 +1,6 @@ # Defining environment ARG BASE_IMAGE=acryldata/datahub-ingestion-base -ARG DOCKER_VERSION=latest +ARG DOCKER_VERSION=head-slim FROM $BASE_IMAGE:$DOCKER_VERSION as base USER 0 From 0a869dd6f8784d50039da308313946d399b0c8ce Mon Sep 17 00:00:00 2001 From: Tamas Nemeth Date: Tue, 26 Sep 2023 10:28:03 +0200 Subject: [PATCH 2/9] fix(ingest/s3): Converting windows style path to posix one on local fs (#8757) --- .../src/datahub/ingestion/source/s3/source.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/metadata-ingestion/src/datahub/ingestion/source/s3/source.py b/metadata-ingestion/src/datahub/ingestion/source/s3/source.py index ab5d3a4e007ac0..ac4433b7eb1f0c 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/s3/source.py +++ b/metadata-ingestion/src/datahub/ingestion/source/s3/source.py @@ -7,6 +7,7 @@ import time from collections import OrderedDict from datetime import datetime +from pathlib import PurePath from typing import Any, Dict, Iterable, List, Optional, Tuple from more_itertools import peekable @@ -819,7 +820,10 @@ def local_browser(self, 
path_spec: PathSpec) -> Iterable[Tuple[str, datetime, in dirs.sort(key=functools.cmp_to_key(partitioned_folder_comparator)) for file in sorted(files): - full_path = os.path.join(root, file) + # We need to make sure the path is in posix style which is not true on windows + full_path = PurePath( + os.path.normpath(os.path.join(root, file)) + ).as_posix() yield full_path, datetime.utcfromtimestamp( os.path.getmtime(full_path) ), os.path.getsize(full_path) From 9972d51205383a632bb549ad1f380561c2eeb83a Mon Sep 17 00:00:00 2001 From: John Joyce Date: Tue, 26 Sep 2023 08:40:32 -0700 Subject: [PATCH 3/9] fix(docs): Rebranding custom to custom SQL (#8896) --- docs-website/sidebars.js | 2 +- ...assertions.md => custom-sql-assertions.md} | 74 +++++++++---------- 2 files changed, 38 insertions(+), 38 deletions(-) rename docs/managed-datahub/observe/{custom-assertions.md => custom-sql-assertions.md} (77%) diff --git a/docs-website/sidebars.js b/docs-website/sidebars.js index 03ea38fd622d4d..06396d60882778 100644 --- a/docs-website/sidebars.js +++ b/docs-website/sidebars.js @@ -437,7 +437,7 @@ module.exports = { Observability: [ "docs/managed-datahub/observe/freshness-assertions", "docs/managed-datahub/observe/volume-assertions", - "docs/managed-datahub/observe/custom-assertions", + "docs/managed-datahub/observe/custom-sql-assertions", ], }, ], diff --git a/docs/managed-datahub/observe/custom-assertions.md b/docs/managed-datahub/observe/custom-sql-assertions.md similarity index 77% rename from docs/managed-datahub/observe/custom-assertions.md rename to docs/managed-datahub/observe/custom-sql-assertions.md index e221cf1058fd08..d4a09b434ca799 100644 --- a/docs/managed-datahub/observe/custom-assertions.md +++ b/docs/managed-datahub/observe/custom-sql-assertions.md @@ -4,12 +4,12 @@ description: This page provides an overview of working with DataHub SQL Assertio import FeatureAvailability from '@site/src/components/FeatureAvailability'; -# Custom Assertions +# Custom SQL Assertions -> ⚠️ The **Custom Assertions** feature is currently in private beta, part of the **Acryl Observe** module, and may only be available to a +> ⚠️ The **Custom SQL Assertions** feature is currently in private beta, part of the **Acryl Observe** module, and may only be available to a > limited set of design partners. > > If you are interested in trying it and providing feedback, please reach out to your Acryl Customer Success @@ -27,18 +27,18 @@ changes to key metric definitions, etc. Often times, these changes break importa like reporting dashboards or data-driven product features. What if you could reduce the time to detect these incidents, so that the people responsible for the data were made aware of data -issues _before_ anyone else? With Acryl DataHub **Custom Assertions**, you can. +issues _before_ anyone else? With Acryl DataHub **Custom SQL Assertions**, you can. Acryl DataHub allows users to define complex expectations about a particular warehouse Table through custom SQL queries, and then monitor those expectations over time as the table grows and changes. -In this article, we'll cover the basics of monitoring Custom Assertions - what they are, how to configure them, and more - so that you and your team can +In this article, we'll cover the basics of monitoring Custom SQL Assertions - what they are, how to configure them, and more - so that you and your team can start building trust in your most important data assets. Let's get started! 
## Support -Custom Assertions are currently supported for: +Custom SQL Assertions are currently supported for: 1. Snowflake 2. Redshift @@ -50,24 +50,24 @@ tab. > Note that SQL Assertions are not yet supported if you are connecting to your warehouse > using the DataHub CLI or a Remote Ingestion Executor. -## What is a Custom Assertion? +## What is a Custom SQL Assertion? -A **Custom Assertion** is a highly configurable Data Quality rule used to monitor a Data Warehouse Table -for unexpected or sudden changes in its meaning. Custom Assertions are defined through a raw SQL query that is evaluated against +A **Custom SQL Assertion** is a highly configurable Data Quality rule used to monitor a Data Warehouse Table +for unexpected or sudden changes in its meaning. Custom SQL Assertions are defined through a raw SQL query that is evaluated against the Table. You have full control over the SQL query, and can use any SQL features supported by your Data Warehouse. -Custom Assertions can be particularly useful when you have complex tables or relationships +Custom SQL Assertions can be particularly useful when you have complex tables or relationships that are used to generate important metrics or reports, and where the meaning of the table is expected to be stable over time. -If you have existing SQL queries that you already use to monitor your data, you may find that Custom Assertions are an easy way to port them +If you have existing SQL queries that you already use to monitor your data, you may find that Custom SQL Assertions are an easy way to port them to Acryl DataHub to get started. For example, imagine that you have a Table that tracks the number of purchases made on your company's e-commerce web store. You have a SQL query that you use to calculate the number of purchases made in the past 24 hours, and you'd like to monitor this -metric over time to ensure that it is always greater than 1000. You can use a Custom Assertion to do this! +metric over time to ensure that it is always greater than 1000. You can use a Custom SQL Assertion to do this! -### Anatomy of a Custom Assertion +### Anatomy of a Custom SQL Assertion -At the most basic level, **Custom Assertions** consist of a few important parts: +At the most basic level, **Custom SQL Assertions** consist of a few important parts: 1. An **Evaluation Schedule** 2. A **Query** @@ -86,8 +86,8 @@ minutes in an hour. #### 2. Query -The **Query**: This is the SQL query that will be used to evaluate the Table. The query should return a single row with a single column. Currently only numeric values are supported (integer and floats). The query can be as simple or as complex as you'd like, and can use any SQL features supported by your Data Warehouse. This requires that the configured user account has read access to the asset. Make sure to use the fully qualified name of the Table in your query. - +The **Query**: This is the SQL query that will be used to evaluate the Table. The query should return a **single row** containing a **single numeric column** (integers, floats). +The query can be as simple or as complex as you'd like, and can use any SQL features supported by your Data Warehouse. This requires that the configured user account has read access to the asset. Make sure to use the fully qualified name of the Table in your query. Use the "Try it out" button to test your query and ensure that it returns a single row with a single column. 
The query will be run against the Table in the context of the configured user account, so ensure that the user has read access to the Table. @@ -99,29 +99,29 @@ The **Condition Type**: This defines the conditions under which the Assertion wi - **Is Not Equal To**: The assertion will fail if the query result is not equal to the configured value - **Is Greater Than**: The assertion will fail if the query result is greater than the configured value - **Is Less Than**: The assertion will fail if the query result is less than the configured value -- **Is outside a range**: The assertion will fail if the query result is outside the configured range +- **Is Outside a Range**: The assertion will fail if the query result is outside the configured range - **Grows More Than**: The assertion will fail if the query result grows more than the configured range. This can be either a percentage (**Percentage**) or a number (**Value**). - **Grows Less Than**: The assertion will fail if the query result grows less than the configured percentage. This can be either a percentage (**Percentage**) or a number (**Value**). - **Growth is outside a range**: The assertion will fail if the query result growth is outside the configured range. This can be either a percentage (**Percentage**) or a number (**Value**). -Custom Assertions also have an off switch: they can be started or stopped at any time with the click of button. +Custom SQL Assertions also have an off switch: they can be started or stopped at any time with the click of button. #### 4. Assertion Description The **Assertion Description**: This is a human-readable description of the Assertion. It should be used to describe the meaning of the Assertion, and can be used to provide additional context to users who are viewing the Assertion. -## Creating a Custom Assertion +## Creating a Custom SQL Assertion ### Prerequisites -1. **Permissions**: To create or delete Custom Assertions for a specific entity on DataHub, you'll need to be granted the +1. **Permissions**: To create or delete Custom SQL Assertions for a specific entity on DataHub, you'll need to be granted the `Edit Assertions` and `Edit Monitors` privileges for the entity. This is granted to Entity owners by default. -2. **Data Platform Connection**: In order to create a Custom Assertion, you'll need to have an **Ingestion Source** configured to your +2. **Data Platform Connection**: In order to create a Custom SQL Assertion, you'll need to have an **Ingestion Source** configured to your Data Platform: Snowflake, BigQuery, or Redshift under the **Integrations** tab. -Once these are in place, you're ready to create your Custom Assertions! +Once these are in place, you're ready to create your Custom SQL Assertions! ### Steps @@ -168,23 +168,23 @@ Once these are in place, you're ready to create your Custom Assertions!

10. Click **Next** -11. Configure actions that should be taken when the Custom Assertion passes or fails +11. Configure actions that should be taken when the Custom SQL Assertion passes or fails

-- **Raise incident**: Automatically raise a new DataHub Incident for the Table whenever the Custom Assertion is failing. This +- **Raise incident**: Automatically raise a new DataHub Incident for the Table whenever the Custom SQL Assertion is failing. This may indicate that the Table is unfit for consumption. Configure Slack Notifications under **Settings** to be notified when an incident is created due to an Assertion failure. -- **Resolve incident**: Automatically resolved any incidents that were raised due to failures in this Custom Assertion. Note that +- **Resolve incident**: Automatically resolved any incidents that were raised due to failures in this Custom SQL Assertion. Note that any other incidents will not be impacted. 1. Click **Save**. -And that's it! DataHub will now begin to monitor your Custom Assertion for the table. +And that's it! DataHub will now begin to monitor your Custom SQL Assertion for the table. -To view the time of the next Custom Assertion evaluation, simply click **Custom** and then click on your +To view the time of the next Custom SQL Assertion evaluation, simply click **Custom** and then click on your new Assertion:

@@ -198,12 +198,12 @@ Once your assertion has run, you will begin to see Success or Failure status for

-## Stopping a Custom Assertion +## Stopping a Custom SQL Assertion -In order to temporarily stop the evaluation of a Custom Assertion: +In order to temporarily stop the evaluation of a Custom SQL Assertion: 1. Navigate to the **Validations** tab of the Table with the assertion -2. Click **Custom** to open the Custom Assertions list +2. Click **Custom** to open the Custom SQL Assertions list 3. Click the three-dot menu on the right side of the assertion you want to disable 4. Click **Stop** @@ -211,16 +211,16 @@ In order to temporarily stop the evaluation of a Custom Assertion:

-To resume the Custom Assertion, simply click **Turn On**. +To resume the Custom SQL Assertion, simply click **Turn On**.

-## Creating Custom Assertions via API +## Creating Custom SQL Assertions via API -Under the hood, Acryl DataHub implements Custom Assertion Monitoring using two "entity" concepts: +Under the hood, Acryl DataHub implements Custom SQL Assertion Monitoring using two "entity" concepts: - **Assertion**: The specific expectation for the custom assertion, e.g. "The table was changed in the past 7 hours" or "The table is changed on a schedule of every day by 8am". This is the "what". @@ -233,15 +233,15 @@ Note that to create or delete Assertions and Monitors for a specific entity on D #### GraphQL -In order to create a Custom Assertion that is being monitored on a specific **Evaluation Schedule**, you'll need to use 2 -GraphQL mutation queries to create a Custom Assertion entity and create an Assertion Monitor entity responsible for evaluating it. +In order to create a Custom SQL Assertion that is being monitored on a specific **Evaluation Schedule**, you'll need to use 2 +GraphQL mutation queries to create a Custom SQL Assertion entity and create an Assertion Monitor entity responsible for evaluating it. -Start by creating the Custom Assertion entity using the `createSqlAssertion` query and hang on to the 'urn' field of the Assertion entity +Start by creating the Custom SQL Assertion entity using the `createSqlAssertion` query and hang on to the 'urn' field of the Assertion entity you get back. Then continue by creating a Monitor entity using the `createAssertionMonitor`. ##### Examples -To create a Custom Assertion Entity that checks whether a query result is greater than 100: +To create a Custom SQL Assertion Entity that checks whether a query result is greater than 100: ```json mutation createSqlAssertion { @@ -265,7 +265,7 @@ mutation createSqlAssertion { } ``` -The supported custom assertion types are `METRIC` and `METRIC_CHANGE`. If you choose `METRIC_CHANGE`, +The supported assertion types are `METRIC` and `METRIC_CHANGE`. If you choose `METRIC_CHANGE`, you will need to provide a `changeType` parameter with either `ABSOLUTE` or `PERCENTAGE` values. The supported operator types are `EQUAL_TO`, `NOT_EQUAL_TO`, `GREATER_THAN`, `GREATER_THAN_OR_EQUAL_TO`, `LESS_THAN`, `LESS_THAN_OR_EQUAL_TO`, and `BETWEEN` (requires minValue, maxValue). The supported parameter types are `NUMBER`. From 54ec12a866868406e83464b21bab5147d11bc5fa Mon Sep 17 00:00:00 2001 From: Zachary McNellis Date: Tue, 26 Sep 2023 11:41:36 -0400 Subject: [PATCH 4/9] docs(observability): Freshness Assertion Operation Types (#8907) --- docs/managed-datahub/observe/freshness-assertions.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/managed-datahub/observe/freshness-assertions.md b/docs/managed-datahub/observe/freshness-assertions.md index c5d4ca9081b43d..82de423f6f2de2 100644 --- a/docs/managed-datahub/observe/freshness-assertions.md +++ b/docs/managed-datahub/observe/freshness-assertions.md @@ -125,7 +125,7 @@ Change Source types vary by the platform, but generally fall into these categori - **DataHub Operation**: A DataHub "Operation" aspect contains timeseries information used to describe changes made to an entity. Using this option avoids contacting your data platform, and instead uses the DataHub Operation metadata to evaluate Freshness Assertions. This relies on Operations being reported to DataHub, either via ingestion or via use of the DataHub APIs (see [Report Operation via API](#reporting-operations-via-api)). 
- Note if you have not configured an ingestion source through DataHub, then this may be the only option available. + Note if you have not configured an ingestion source through DataHub, then this may be the only option available. By default, any operation type found will be considered a valid change. Use the **Operation Types** dropdown when selecting this option to specify which operation types should be considered valid changes. You may choose from one of DataHub's standard Operation Types, or specify a "Custom" Operation Type by typing in the name of the Operation Type. Using either of the column value approaches (**Last Modified Column** or **High Watermark Column**) to determine whether a Table has changed can be useful because it can be customized to determine whether specific types of important changes have been made to a given Table. Because it does not involve system warehouse tables, it is also easily portable across Data Warehouse and Data Lake providers. From 2e1afaf7a49acbccef9d90047c104479a266cf49 Mon Sep 17 00:00:00 2001 From: siddiquebagwan Date: Tue, 26 Sep 2023 22:20:53 +0530 Subject: [PATCH 5/9] doc(ingestion): looker & lookml ingestion guide (#8006) Co-authored-by: MohdSiddiqueBagwan Co-authored-by: Hyejin Yoon <0327jane@gmail.com> --- docs-website/sidebars.js | 7 + .../looker/configuration.md | 212 ++++++++++++++++++ .../quick-ingestion-guides/looker/overview.md | 52 +++++ docs/quick-ingestion-guides/looker/setup.md | 156 +++++++++++++ 4 files changed, 427 insertions(+) create mode 100644 docs/quick-ingestion-guides/looker/configuration.md create mode 100644 docs/quick-ingestion-guides/looker/overview.md create mode 100644 docs/quick-ingestion-guides/looker/setup.md diff --git a/docs-website/sidebars.js b/docs-website/sidebars.js index 06396d60882778..b07cd0b03ce118 100644 --- a/docs-website/sidebars.js +++ b/docs-website/sidebars.js @@ -81,6 +81,13 @@ module.exports = { "docs/quick-ingestion-guides/powerbi/configuration", ], }, + { + Looker: [ + "docs/quick-ingestion-guides/looker/overview", + "docs/quick-ingestion-guides/looker/setup", + "docs/quick-ingestion-guides/looker/configuration", + ], + }, ], }, { diff --git a/docs/quick-ingestion-guides/looker/configuration.md b/docs/quick-ingestion-guides/looker/configuration.md new file mode 100644 index 00000000000000..d9ba1907b006ef --- /dev/null +++ b/docs/quick-ingestion-guides/looker/configuration.md @@ -0,0 +1,212 @@ +--- +title: Configuration +--- +# Configuring Looker & LookML Connector + +Now that you have created a DataHub-specific API key with the relevant access in [the prior step](setup.md), it's time to set up a connection via the DataHub UI. + +## Configure Secrets + +You must create two secrets to configure a connection with Looker or LookerML. + +* `LOOKER_CLIENT_ID` +* `LOOKER_CLIENT_SECRET` + +On your DataHub instance, navigate to the **Ingestion** tab in your screen's top right corner. + +

+ Navigate to the "Ingestion Tab" +

+ +:::note +If you do not see the Ingestion tab, please get in touch with your DataHub admin to grant you the correct permissions. +::: + +Navigate to the **Secrets** tab and click **Create new secret**. + +

+ Secrets Tab +

+ +First, create a secret for the **Client Id**. The value should be the **Client Id** of the API key created in the [prior step](http://localhost:3000/docs/next/quick-ingestion-guides/looker/setup#create-an-api-key). + +

+ API Key Client ID +

+ +Then, create a secret for the **Client Secret**. The value should be the **Client Secret** of the API key created in the [prior step](http://localhost:3000/docs/next/quick-ingestion-guides/looker/setup#create-an-api-key). + +

+ API Key client secret +

+ + +## Configure Looker Ingestion + +### Configure Recipe + +Navigate to the **Sources** tab and click **Create new source**. + +

+ Click "Create new source" +

+ +Choose `Looker`. + +

+ Select Looker from the options +

+ +Enter the details into the Looker Recipe. + +* **Base URL:** This is your looker instance URL. (i.e. `https://.cloud.looker.com`) +* **Client ID:** Use the secret LOOKER_CLIENT_ID with the format `${LOOKER_CLIENT_ID}`. +* **Client Secret:** Use the secret LOOKER_CLIENT_SECRET with the format `${LOOKER_CLIENT_SECRET}`. + + +Optionally, use the `dashboard_pattern` and `chart_pattern` fields to filter for specific dashboard and chart. + + config: + ... + dashboard_pattern: + allow: + - "2" + chart_pattern: + allow: + - "258829b1-82b1-4bdb-b9fb-6722c718bbd3" + +Your recipe should look something like this: + +

+ Looker Recipe +

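If you prefer to work from YAML directly, a minimal recipe along these lines should be equivalent to what the form produces. This is a sketch: it assumes the standard Looker source fields (`base_url`, `client_id`, `client_secret`) together with the secrets created in the Configure Secrets step, so adjust it to your connector version as needed.

```yml
source:
  type: looker
  config:
    # Your Looker instance URL
    base_url: https://<company>.cloud.looker.com
    # Secrets created in the "Configure Secrets" step
    client_id: ${LOOKER_CLIENT_ID}
    client_secret: ${LOOKER_CLIENT_SECRET}
    # Optional: filter to specific dashboards, as described above
    dashboard_pattern:
      allow:
        - "2"
```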
+ + After completing the recipe, click **Next**. + +### Schedule Execution + +Now, it's time to schedule a recurring ingestion pipeline to extract metadata from your Looker instance regularly. + +Decide how regularly you want this ingestion to run-- day, month, year, hour, minute, etc. Select from the dropdown. + +

+ schedule selector +

+ +Ensure you've configured your correct timezone. + +

+ timezone_selector +

+ +Finally, click **Next** when you are done. + +### Finish Up + +Name your ingestion source, then click **Save and Run**. + +

+ Name your ingestion +

+ +You will now find your new ingestion source running. + +

+ ingestion_running +

+ +## Configure LookML Connector + +Now that you have created a DataHub-specific API key and Deploy Key with the relevant access in [the prior step](setup.md), it's time to set up a connection via the DataHub UI. + +### Configure Recipe + +Navigate to the **Sources** tab and click **Create new source**. + +

+ Click "Create new source" +

+ +Choose `LooML`. + +

+ Select Looker from the options +

+ +Enter the details into the Looker Recipe. You need to set a minimum 5 fields in the recipe for this quick ingestion guide: + +* **GitHub Repository:** This is your GitHub repository where LookML models are stored. You can provide the full URL (example: https://gitlab.com/gitlab-org/gitlab) or organization/repo; in this case, the connector assume it is a GitHub repo +* **GitHub Deploy Key:** Copy the content of `looker_datahub_deploy_key` and paste into this filed. +* **Looker Base URL:** This is your looker instance URL. (i.e. https://abc.cloud.looker.com) +* **Looker Client ID:** Use the secret LOOKER_CLIENT_ID with the format `${LOOKER_CLIENT_ID}`. +* **Looker Client Secret:** Use the secret LOOKER_CLIENT_SECRET with the format `${LOOKER_CLIENT_SECRET}`. + +Your recipe should look something like this: + +

+ LookML Recipe +

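If you prefer to work from YAML directly, a rough equivalent recipe is sketched below. It assumes the LookML source's `github_info` and `api` configuration blocks and a hypothetical `LOOKML_DEPLOY_KEY` secret holding the private key contents; consult the LookML source documentation for the exact field names before using it.

```yml
source:
  type: lookml
  config:
    github_info:
      # GitHub Repository in organization/repo form
      repo: your-org/your-lookml-repo
      # Contents of the looker_datahub_deploy_key private key
      # (LOOKML_DEPLOY_KEY is a hypothetical secret name)
      deploy_key: ${LOOKML_DEPLOY_KEY}
    api:
      # Looker Base URL and API credentials
      base_url: https://<company>.cloud.looker.com
      client_id: ${LOOKER_CLIENT_ID}
      client_secret: ${LOOKER_CLIENT_SECRET}
```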
+ + +After completing the recipe, click **Next**. + +### Schedule Execution + +Now, it's time to schedule a recurring ingestion pipeline to extract metadata from your Looker instance regularly. + +Decide how regularly you want this ingestion to run-- day, month, year, hour, minute, etc. Select from the dropdown. + +

+ schedule selector +

+ +Ensure you've configured your correct timezone. +

+ timezone_selector +

+ +Click **Next** when you are done. + +### Finish Up + +Name your ingestion source, then click **Save and Run**. +

+ Name your ingestion +

+ +You will now find your new ingestion source running. + +

+ ingestion_running +

+ +## Validate Ingestion Runs + +View the latest status of ingestion runs on the Ingestion page. + +

+ ingestion succeeded +

+ +Click the `+` sign to expand the complete list of historical runs and outcomes; click **Details** to see the results of a specific run. + +

+ ingestion_details +

+ +From the Ingestion Run Details page, pick **View All** to see which entities were ingested. + +

+ ingestion_details_view_all +

+ +Pick an entity from the list to manually validate if it contains the detail you expected. + +

+ ingestion_details_view_all +

+ + +**Congratulations!** You've successfully set up Looker & LookML as an ingestion source for DataHub! + +*Need more help? Join the conversation in [Slack](http://slack.datahubproject.io)!* diff --git a/docs/quick-ingestion-guides/looker/overview.md b/docs/quick-ingestion-guides/looker/overview.md new file mode 100644 index 00000000000000..843d704526bcc4 --- /dev/null +++ b/docs/quick-ingestion-guides/looker/overview.md @@ -0,0 +1,52 @@ +--- +title: Overview +--- +# Looker & LookML Ingestion Guide: Overview + +## What You Will Get Out of This Guide + +This guide will help you set up the Looker & LookML connectors to begin ingesting metadata into DataHub. +Upon completing this guide, you will have a recurring ingestion pipeline to extract metadata from Looker & LookML and load it into DataHub. + +### Looker + +Looker connector will ingest Looker asset types: + +* [Dashboards](https://cloud.google.com/looker/docs/dashboards) +* [Charts](https://cloud.google.com/looker/docs/creating-visualizations) +* [Explores](https://cloud.google.com/looker/docs/reference/param-explore-explore) +* [Schemas](https://developers.looker.com/api/explorer/4.0/methods/Metadata/connection_schemas) +* [Owners of Dashboards](https://cloud.google.com/looker/docs/creating-user-defined-dashboards) + +:::note + +To get complete Looker metadata integration (including Looker views and lineage to the underlying warehouse tables), you must also use the [lookml](https://datahubproject.io/docs/generated/ingestion/sources/looker#module-lookml) connector. + +::: + + +### LookML + +LookMl connector will include the following LookML asset types: + +* [LookML views from model files in a project](https://cloud.google.com/looker/docs/reference/param-view-view) +* [Metadata for dimensions](https://cloud.google.com/looker/docs/reference/param-field-dimension) +* [Metadata for measures](https://cloud.google.com/looker/docs/reference/param-measure-types) +* [Dimension Groups as tag](https://cloud.google.com/looker/docs/reference/param-field-dimension-group) + +:::note + +To get complete Looker metadata integration (including Looker views and lineage to the underlying warehouse tables), you must also use the [looker](https://datahubproject.io/docs/generated/ingestion/sources/looker#module-looker) connector. + +::: + +## Next Steps +Please continue to the [setup guide](setup.md), where we'll describe the prerequisites. + +### Reference + +If you want to ingest metadata from Looker using the DataHub CLI, check out the following resources: +* Learn about CLI Ingestion in the [Introduction to Metadata Ingestion](../../../metadata-ingestion/README.md) +* [Looker Ingestion Source](https://datahubproject.io/docs/generated/ingestion/sources/Looker) + +*Need more help? Join the conversation in [Slack](http://slack.datahubproject.io)!* diff --git a/docs/quick-ingestion-guides/looker/setup.md b/docs/quick-ingestion-guides/looker/setup.md new file mode 100644 index 00000000000000..c08de116895ea5 --- /dev/null +++ b/docs/quick-ingestion-guides/looker/setup.md @@ -0,0 +1,156 @@ +--- +title: Setup +--- + +# Looker & LookML Ingestion Guide: Setup + +## Looker Prerequisites + +To configure ingestion from Looker, you'll first have to ensure you have an API key to access the Looker resources. + +### Login To Looker Instance + +Login to your Looker instance(e.g. `https://.cloud.looker.com`). + +Navigate to **Admin Panel** & click **Roles** to open Roles Panel. + +

+ Looker home page +

+ +

+ Looker roles search +

+ +### Create A New Permission Set + +On **Roles Panel**, click **New Permission Set**. + +

+ Looker new permission set +

+ +Set a name for the new permission set (e.g., *DataHub Connector Permission Set*) and select the following permissions. + +
+Permission List + +- access_data +- see_lookml_dashboards +- see_looks +- see_user_dashboards +- explore +- see_sql +- see_lookml +- clear_cache_refresh +- manage_models +- see_datagroups +- see_pdts +- see_queries +- see_schedules +- see_system_activity +- see_users + +
+ +After selecting all permissions mentioned above, click **New Permission Set** at the bottom of the page. + +

+Looker permission set window +

+ +### Create A Role + +On the **Roles** Panel, click **New Role**. + +

+Looker new role button +

+ +Set the name for the new role (e.g., *DataHub Extractor*) and set the following fields on this window. + +- Set **Permission Set** to permission set created in previous step (i.e *DataHub Connector Permission Set*) +- Set **Model Set** to `All` + +Finally, click **New Role** at the bottom of the page. + +

+ Looker new role window +

+ +### Create A New User + +On the **Admin** Panel, click **Users** to open the users panel. + +

+ Looker user search +

+ +Click **Add Users**. + +

+ Looker add user +

+ +On **Adding a new user**, set details in the following fields. + +- Add user's **Email Addresses**. +- Set **Roles** to the role created in previous step (e.g. *DataHub Extractor*) + +Finally, click **Save**. + +

+Looker new user window +

+ +### Create An API Key + +On the **User** Panel, click on the newly created user. + +

+Looker user panel +

+ +Click **Edit Keys** to open the **API Key** Panel. + +

+Looker user view +

+ +On the **API Key** Panel, click **New API Key** to generate a new **Client ID** and **Client Secret**. +

+Looker new api key +

+ +## LookML Prerequisites + +Follow the below steps to create the GitHub Deploy Key. + +### Generate a private-public SSH key pair + +```bash + ssh-keygen -t rsa -f looker_datahub_deploy_key +``` + +This will typically generate two files like the one below. +* `looker_datahub_deploy_key` (private key) +* `looker_datahub_deploy_key.pub` (public key) + + +### Add Deploy Key to GitHub Repository + +First, log in to [GitHub](https://github.com). + +Navigate to **GitHub Repository** -> **Settings** -> **Deploy Keys** and add a public key (e.g. `looker_datahub_deploy_key.pub`) as deploy key with read access. + +

+Looker home page +

+ +Make a note of the private key file. You must paste the file's contents into the GitHub Deploy Key field later while [configuring](./configuration.md) ingestion on the DataHub Portal. + +## Next Steps + +Once you've done all the above steps, it's time to move on to [configuring the actual ingestion source](configuration.md) within DataHub. + +_Need more help? Join the conversation in [Slack](http://slack.datahubproject.io)!_ \ No newline at end of file From dc9141a6f405aba866dc8a31212dce9d9fe3fae3 Mon Sep 17 00:00:00 2001 From: Harshal Sheth Date: Tue, 26 Sep 2023 13:34:36 -0400 Subject: [PATCH 6/9] fix(ingest): bump typing-extensions (#8897) --- metadata-ingestion/setup.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/metadata-ingestion/setup.py b/metadata-ingestion/setup.py index 2387e848e68a2b..80e6950dc5ace5 100644 --- a/metadata-ingestion/setup.py +++ b/metadata-ingestion/setup.py @@ -18,9 +18,7 @@ def get_long_description(): base_requirements = { - # Typing extension should be >=3.10.0.2 ideally but we can't restrict due to Airflow 2.0.2 dependency conflict - "typing_extensions>=3.7.4.3 ; python_version < '3.8'", - "typing_extensions>=3.10.0.2,<4.6.0 ; python_version >= '3.8'", + "typing_extensions>=3.10.0.2", "mypy_extensions>=0.4.3", # Actual dependencies. "typing-inspect", From 622816dcb8bd661f645b37a48c8f9d9aca7be9f5 Mon Sep 17 00:00:00 2001 From: hariishaa Date: Tue, 26 Sep 2023 20:51:30 +0300 Subject: [PATCH 7/9] feat(metadata-ingestion): implement mlflow source (#7971) Co-authored-by: Andrew Sikowitz --- .../app/ingest/source/builder/constants.ts | 4 + .../app/ingest/source/builder/sources.json | 7 + datahub-web-react/src/images/mlflowlogo.png | Bin 0 -> 19569 bytes .../docs/sources/mlflow/mlflow_pre.md | 9 + .../docs/sources/mlflow/mlflow_recipe.yml | 8 + metadata-ingestion/setup.py | 3 + .../src/datahub/ingestion/source/mlflow.py | 321 ++++++++++++++++++ .../mlflow/mlflow_mcps_golden.json | 238 +++++++++++++ .../integration/mlflow/test_mlflow_source.py | 104 ++++++ .../tests/unit/test_mlflow_source.py | 133 ++++++++ .../main/resources/boot/data_platforms.json | 10 + 11 files changed, 837 insertions(+) create mode 100644 datahub-web-react/src/images/mlflowlogo.png create mode 100644 metadata-ingestion/docs/sources/mlflow/mlflow_pre.md create mode 100644 metadata-ingestion/docs/sources/mlflow/mlflow_recipe.yml create mode 100644 metadata-ingestion/src/datahub/ingestion/source/mlflow.py create mode 100644 metadata-ingestion/tests/integration/mlflow/mlflow_mcps_golden.json create mode 100644 metadata-ingestion/tests/integration/mlflow/test_mlflow_source.py create mode 100644 metadata-ingestion/tests/unit/test_mlflow_source.py diff --git a/datahub-web-react/src/app/ingest/source/builder/constants.ts b/datahub-web-react/src/app/ingest/source/builder/constants.ts index 61667a941765c3..dba8e8bb1dce6b 100644 --- a/datahub-web-react/src/app/ingest/source/builder/constants.ts +++ b/datahub-web-react/src/app/ingest/source/builder/constants.ts @@ -27,6 +27,7 @@ import powerbiLogo from '../../../../images/powerbilogo.png'; import modeLogo from '../../../../images/modelogo.png'; import databricksLogo from '../../../../images/databrickslogo.png'; import verticaLogo from '../../../../images/verticalogo.png'; +import mlflowLogo from '../../../../images/mlflowlogo.png'; import dynamodbLogo from '../../../../images/dynamodblogo.png'; export const ATHENA = 'athena'; @@ -64,6 +65,8 @@ export const MARIA_DB = 'mariadb'; export const MARIA_DB_URN = 
`urn:li:dataPlatform:${MARIA_DB}`; export const METABASE = 'metabase'; export const METABASE_URN = `urn:li:dataPlatform:${METABASE}`; +export const MLFLOW = 'mlflow'; +export const MLFLOW_URN = `urn:li:dataPlatform:${MLFLOW}`; export const MODE = 'mode'; export const MODE_URN = `urn:li:dataPlatform:${MODE}`; export const MONGO_DB = 'mongodb'; @@ -119,6 +122,7 @@ export const PLATFORM_URN_TO_LOGO = { [LOOKER_URN]: lookerLogo, [MARIA_DB_URN]: mariadbLogo, [METABASE_URN]: metabaseLogo, + [MLFLOW_URN]: mlflowLogo, [MODE_URN]: modeLogo, [MONGO_DB_URN]: mongodbLogo, [MSSQL_URN]: mssqlLogo, diff --git a/datahub-web-react/src/app/ingest/source/builder/sources.json b/datahub-web-react/src/app/ingest/source/builder/sources.json index b4ea2db018bd84..1bd5b6f1f768b5 100644 --- a/datahub-web-react/src/app/ingest/source/builder/sources.json +++ b/datahub-web-react/src/app/ingest/source/builder/sources.json @@ -181,6 +181,13 @@ "docsUrl": "https://datahubproject.io/docs/generated/ingestion/sources/metabase/", "recipe": "source:\n type: metabase\n config:\n # Coordinates\n connect_uri:\n\n # Credentials\n username: root\n password: example" }, + { + "urn": "urn:li:dataPlatform:mlflow", + "name": "mlflow", + "displayName": "MLflow", + "docsUrl": "https://datahubproject.io/docs/generated/ingestion/sources/mlflow/", + "recipe": "source:\n type: mlflow\n config:\n tracking_uri: tracking_uri" + }, { "urn": "urn:li:dataPlatform:mode", "name": "mode", diff --git a/datahub-web-react/src/images/mlflowlogo.png b/datahub-web-react/src/images/mlflowlogo.png new file mode 100644 index 0000000000000000000000000000000000000000..e724d1affbc14d53f0ec8d6d5304b8aac1dd4f48 GIT binary patch literal 19569 zcmeFZi$Bx<{{ZS!N>V6=a?Q2m79scOf)H}QjO3R4b#AjMlFBXQewmUx6Jm^!3d#LC zj7{#$CfBhs=dI7>`#a}vI6WSfz20xH*Yoyz-k*DI_)zQY>C30-=;+SsXy1KAM|X@w zNB7s;QzwCy^{M#<;J?58A8Bd+HQp&64t!wr(YExbqhn$_`ui7MW;Q1sogTZ+T@91q zY~mDSI=5-V;kt2~82<_(|?by)A|$t3?~2(r9j^gSx3csl;gbiEXZkdFDt znS6m)5EJ%FTOW<4A2PD{FZ^)ok^vPd-O+XyvA3(WVzBY%t3+(;a4^`^! zGJ*WUzS+8zQgZ+t zcY7)ZwX+_1vz*|h`4?1t)^+pcj2=ef81QKFsU%gbUzB3+7suFPFvp+g?OJXYwQGVQ z9zAue1+RUGm=j`qRY`Y9ln#xJ_W}T>Ht{i{7$z8WlD!3snTkxiThUft_S%~ zDFN$WntjIqf_)UM^jj|?@T=9mKXzb=Asz>wtXdi3jP6nLZ8_Cvl%6Fh*GE(6F`o2C zPhM|vvyC;{Fsw1g7mYZvkmgY-!1C>#iJ!{FSNC3|t))D%B~~%{yFBva#k$B$;iduC1YZfCpb*>?YExHhnyOcQy7M zOs@ir?>k!FjiRWqd0`rvG4e+|Z1j>n+8buSev8cm4(2JBNDwv6vs*!p{CW8%z8cNZ ze%P(?sZKQ&Mri8NCx~UmdtaozXJ22!#^@kAR+K``)8?3UAFt7!>A4^Edaf%<>gqRWm<<(zy7> z*yJtu`v}B06AeBS%b6QiG(Xd;XuW$NYH>(H)7^X5$v5!SK=ole1w55^WE36q_m&PE zBBEq?Ng!{p)@6etLTAxXu!&)2fz=EL%8LUya{Bi8=GWaFZu%|#4p|s0hlxApIYRk} z@AVwZinh(89^sQLvt=XuGXxX=@t`SQ;K>)X<{C-lDsqv(oF!IRY*@73iwRhMD<#nh z@naqB(H^C~Z+-+qJu$~f{E@~E`UUlR7Gfy+O_EeS>4@uZ4WgX1GWu5or*>|umjQx| zsQ^~#{JL1SSU_-3bNhWA4D@JagV4yLf~Q{;&3?I)I;V=j9xcvUUubD_dP){NIE#@u zTJS@xW=yYoWXAHf<9(-&&g4I|YGl6oS7>7XbhCD?#AGY@2*hXmOta)hM&f$DnBK!c zkQ%TgeQozWEhRsMSb=b706vREFI0PLeokc}bqcz{{5}yjjwG-1id91b;~?Qv zpAYEhGD(NrZOgEI=fSFmwKeI92@LpW^~Jhq+hSd`Z!35lct60sLhuUv=57ZEi_LH~HM*giuI`df-GNcdQIr^B}CxWp@XWwAl?s&0? 
zB>U7I{UWEJ`mp&a1~LJJIRkvuQ?gB;yuvx5P%Pd21^h?A=LP}pbF|-Uoo?3au@3x5 zsQ!b2uy*`!0rdTVOg;Y)2*&A1-ja2%s9|0#*FI_mMNtGW!ny?b8^`c^1Yqyz>OKQR zV@Y&U#&eF_&H^h zCQ&LJlCSNPFf$8E1MHj|ck9&q+X|<0J*BAj8^=e&bfKX1%Oda^9QNvX9-w#X|7>_G zqVZ1DE;oRuBvb`}9uQ=OV`$GHkSHBp@p1Z~pnbz=zgAV49>_@PC%9)&k2paSy#(;( z2m@x;NuCBk*RxT)E~w1nQq&3ofVb`LJVqnMUf~HS6FNFWhEt^Y&q7)NoANAtShnys zY2t{2fJ|+l76HXEaLGTryq>AiTY}h#)dN!h72G+f*OSJ$6&OWO`it&%4u5^)l?!6g zy{9q1lxA%|dS6^3uGwH1>F8GITc^&l?b3v}CTA5I5tf-H)JxA5?s7C9QGb{*qVYHr zyN8RWosM+9l#vG69?`{qaoJVI`&DXRAV>^Akc~cwVFE?oICfBO;saa-T{)yX3&tr% z?1H-F`AKXlAPz?mJ_PVQwapmef*o4s<|tXHUPF24z&Py$`>~L}0Xfw=_1q9HdU^6V z^aCNOAjE$E*|veqv4cAp@G-h)N}4_#qkus1wR=XiLUY94*)E_wjH06L#5Z1EebDRs z2Nps4Ao~;0w~WvnfT<76N*1i^fb{}?Ljk2qb5Xv|A>iq7mu-WRLK%qq>oiYr<7gn)~3`!ewR7vN5s zc%_nQ+Xe@>ob?ZLk^r~V|CY5o^g-90`Vs0+%BvUe@c_b$EKUNxTFk%XhNO;$W zV{o#zh7rmGkY|v~#6qPVJpLhKMm$5|ZW{$P4JhKd*8xa*80#~TM;%)NbngE8(mh5A zwmLyM&t-t*5EjN_*P=raah`xa94pLZCcRg?a`uhlsj}GCbLvGAbNk(JLh5eHd2FQs z^!@@CF#6J$Yk4MT$qg)}vHIP4LKyYBj9%Kzgs_fM`HV**-Gau{u2`$&cHv>rID#4Y6=K z!>k^=SbAcf2)x+SXX_D;O|C0Pasfc;xyjda%oJ`pt0TUNIrx;DN>GRy>U~BSHGqr} zb9XYll>u8}G3?>PGIMzIxjU5k*!BGItL;^s1Tx@9PI9npGPBp?}NzoL7{E@i<;FhV6W@(vVvM>_Sa>z+x zGPX_<6t1(oID>R-CIWWC!8EHC1b-X(=_e<5=Yd$PUCzN~c9?}wJrXgd{5S8*GS84S z)-N1EBE7GD_^4I_sxfK+z^~3aMq{YKc>&P)0?_a->)J${w)bnpS-1>p_Qr#-xIpzc z;ZGcn7}s7d5(z1m^5ZeP^eqBnp(MGvSl{2+_@j{*wLbzio0$p_JUjZEDbmh{S^j289IFuV~0iAfbqQ1SCN#`_lH6tb-F zp3S)q&9I-tu-BL1I@HHBs^9fMm`;-AN-H277$;pB zIXs{aS{f0uEY38Ck@q!9q0Vx(+t`NkqeQMh2+%n~TOn zYp$!S8S;(L`Br}~7hztyV!uh>4)hQD4=h=?0}A#+i!LR^3mU4T*YUjy>Wdz1k6C;` z7>kT))hQ3^Vt0M}@PK{$3L|~@hrN)Nn|A8%?Ntc(TJn-HHHp*zs3G(fr;uB1D z{-8Rc*9!Mx#Vw1dhdMQ0cF@}nkiHJRtKRObOX%^~50~a&pJ7{FQ&5-992p2|*9lUk zJ;BHwdv=xW*^W%>#vSMVwkJO*MVC0TI&KYv(SkbarrPQ{(%9gNOYY(fy}X+i>NnnR z<{$`lN91|GE7Dp+6nWh4`5;9VFnk#;TaRBimwpOB5gvIek>ckaEY!~m+@eRW3CY5)pZg?#DW>~J zTj^=zLOs{9V0jx9!*Pd(OLn*Re}4l)sTlEdlM<(9`vBGy^~|FCz?~mH%w!0(qH77a zF$K=Vk8SfggRT-Hx22?Kb>4q!P8skwH+Jc3bIxK8n&K+^I^!$W&nMSX2EXZjiWmpP zP^Xi~#k%IJE2y~}*Dn;Mt3eI#Q9LfL1hH^=b8$acyMcaLEEC~)KePr-v!`$^yQZ9D zPcYg{y7#-MDH%S3qsQ>dHH$4X^xb8O}h1-K36h$=&#UaGYMV zYuq6$Q_R?2&+s5=j|^h|w(`ZowLNXf^Rhb+n-eejrtE}&f8_IZiwP!Kw>eH%BpdLmpLC4zHPAYay7JFEJ37Qp)d!M+KUMU!7dPmUGwb@H1NlQL{s|8ap z8PxrCaM?BwE{&zlu|wEQn&UsH3vLN(Q**zkZxPf?+8V|i|Ff?NHcO^8ZlM2&jrH}JF^7aH^!xX} z+PPo!SH*U8%eyEcNB5g#yZfLr47G8Kip?@m-kQ~D`UpsS#O_V+v5wF>Ep$Z~Aj_3( zjB40hzvoPiA^7!V^-tdNqv@>SMxi$<2()N3TjBr9KUpq#7+iBLq zJcCb7C!L7ygc;1V6sHD%^?s|>c*uQsETml{eG56>)rfUaZQuwK}71 zFydnU@bk+AA#08nS9?5@b1fN$e=&Jh@zg}!{lVdJSw-{ROysI??zH9n&zyYGNPfjm zkG1?q(D7UHCL|ejUiP7I02TjR)xD9=xrH(+E+6D`s5mi=JCsvRZVvtKJ$Xytc{d=V z^-z@6s+ZR_s~g{!snooU*_4MVIkd<6_SES2J<19X_h)wd;TFIH-5OwsFXE77os>0^ zgnZb<3f6q{)WIzQzW%~9DiakQcesq|t$OOQ20=2`UiV`yFKE}7CD}mp2+avPn;&?3 z+^?rjDC{Q2+oUz=N3}IVWE388x!PnsZ1kUs@Vr6YCD88#DHOTX$&TaV)cwU1nY}iuCLYEtl?FD{G%%;W$OQ z1mu|pR-fwnF~IB7aRnpb@g%(|-HPa3Lu5U||E9Bem`7!MeC}bd#@kdrBDy7D>ac3# zScAJ=-K}kY1I+`k2I#zqU_ZIgyU+&txa`ABSrdO9v<{s;GQKn1-^cCbV_^Qym8j%1 z+tS#)k|j?&_~IV&+l7Vol!OZlKKJFzm&X3AlAf{Z@jHyUYEVk<0+sJ)t33Ad^&-8D zBx>zzM$)A>W+aC|al&_IHKPseR(T4cy6qze?erG-qpW(!&{(sZb54#EA7`I@;Ev5S z>TuQ`W=E_%BXcXst`QMbb~?t+qi!>No>DcwB#Zr&Opy188*e|cLuJL6BW$+!&mbGZ z`IIJNjE^56;|>vif4Plh96py_=aRDF*&qLNbB-sZ;G$$1-VZug;HO5D8+}8&rw~Zf z$K~Awo~H~)JYEW|m}_;B0X<87EgcfAZO7#6h4(Q4S>WE6)Q|_;Cvv-*3kKwB9JY-R z@W^!;dy0Y|HF}N(L6|Bv4A=9sQ`8I}T?p0rQ8!P893#+NLh4lIw){KTCm9y2ixjSM z?OUeA7-7yFklX*hB{^!npIwJ+(;}s>og1tRE%Vt_MWS%`Ti2(Dx$)(Il6;@Hd!;q9 zFSI5(i->a6fxQjGhT0vUBA|LDGW@A;(E7ok^hi456C35()RbfjVgy#ac>3g!^{|{G z8sb*JO6NU0>Ex2{2Q^d~0bj5C-ejiv96FUF_H!r5ZQ3P4vA*nimU!D?>AseUlG4H9 
zH}Bq?yEP1_UZmC}sBHWI^}y5S`L!R!7Ioix@=>&V{{XBwrZzCp-~Sf7mrhS!{lS~Q zZM-~u4kM>4V?XEoycxI}y@}n;o+G*dWT}yK5~{g?zqIL#Iq3V|o2B(;`J=zXBBk z7PhR)3dHe`zw04Gi$CedSmqx$+QhNAtj8qEML_NY>9l|F3jAkTKeV4dQvE_F_?(X?L<2%WPSb)(aXy7c5^ zYi7j4*}J@JO2p{(Ao`UxlQfrbyBDoEuj}<$hjx27`&Q`mn*2*jc#*Nw{Uh;;=<4%l z#xsL3U*%Ze_%pLSYHN50oc*n2QyL`OY(#kE~NYA2@gV1F+z4|awF;g@SNwq_4+2X&no8q5NvHC3lQt58UQ)gmBHMcDtI7?5jJNGh!ov^{jK=EpFy+NXS1EVh39~nr!Ms? zcNj5Fww|RI%0eybjs?W2o;g5HO@M7eEiIDx*Rv;;?we8%1k%oGTf_S zl0p!;YMF&q-{PEQB1O8RyeZ^2)4e{*2>jNG<@2qoTtE`Kz)J5h z<0@4{bdV1Wa{Ea?o;As^d`@evJRS$hQVKmB8^b|T7_k_ab$|E3zacnTa`56!k~3zT zSsJ@3+Hf$-8G2#qBnDlgP4-PZP$raWhNbq%4D#DD&VDnCyfhFTLH*Ll`!-U#qquT? zbDj3YF>X5dd70UoeV}H-10Ik4+lM~@7y20V37Y$RmJ(!ffj_Uv;huw5ACO^$NP4ZV zx`rNjX5m2tFLsASb~$zL6}Hn;hzrjSJ5ihE5E54H2)Z) zz>`)Fn<@efskZsM*GXreFl439>mzhi=b}2pt=HUdQW88oZ~6Wzh3e-Kec8q!BsaIU zgK3M%t=+zQa*lghLZnqHpIa{GUY1H9p=WJ{ag-0@{&nt~EoV%ak_+y0eapJtg(8KA}y$ULcq{tK>vlk+}N&Gvy+v^q^2O;wU-@{G_wCi|mxKtnD2} zKboc^d0A;RtY+M%1~I%p_=|0<1D(4$1^_IcF+NUR5w}+Oz`yqvInQ*3FECW&NqTlR zj$7wx_^-e#FKF(@sjmT06=)TwV}gQ41vkfR^So<8MPlC@tn9C{DW3Apif7QXP#5gF ztnU*Vb1A$_!*li$Io&hd#SHx?pp#YK{N}8(R4r*dR90Q;+H{J+@#xaH zrQb0K_)gc(Z$FeRTnMM=b$S#YB$%4c?}fMIU-nKc_I3xT_6}ijB{*+g5ZXXU65ca9 zKep!nD8SUt*kvu@g(h`YKSwnK>XtDP=(A=iv&mP++_P~*>%wwhumjrN_w7E866pC{ zv^EvODrMqH;pdPmH0~23$FN;0HN4OX%3|Ecu3|XwYx^_S);7^5Y;IoKMc&$@VZ%mR zL>FG_p$(=v$`{EkraGxPuwSXpx4w2}%WgXVTDqyHJDR0d7-I#vaiZ-k$?#4d$MY4Z*M9*yYhys0{CKY14+k6B zUM{YP-s#1u^8{&asBmHN!{ZQ&c7kFV8bho07YP7r(m!70Qg2IrZq5;urpCIz*ttY7 zu?VdRhCZh?Osd_8#2-p0+Sw{-KdiR14ukhJepFf%`&h^kgF--OKVQPwLmy9iwlmoP zw4z~!-MyYZ{nT=E!t4mBRsKWCptrBriaTRs>pte;`TeI;)vqfy))WoUGIp8?MUvvy z)by|yB{D1)fCrG*lmp}*(-$R@V|Yz<*Y(YiLG&=zaC7l@>;Z)_HXg?P;PjBmT0iRd z*DUI>mD+vJbn7}*NXNRs6~>9QcaHiq+)=|9omb~kJMS1x@jV((6B1N+#?tCZ_kO-u zhHH=H*m4u9%>d=`S%exA(Up_&Wy1j@9J2y%6le8fUlRgZg^Eo2`PlW_)KNvegMHvY z-mB~qGJxo@xo+}#f8I84t$TOxuxP}SS4Y>cd06evy{gIA?}3CBTf?H}PQEHS-K^Ze z$hR=0TdhlOYVk>n$WJ4~oLLmx6Qc{!9%F|2a{5q2lj6RbACi~D zKXB8J@YN$ijF(aC%#n>XAv9~5+QW9!^R-gGmZGiD{eAx2G)bf|_?ZsNF2WtK1-=7; z1_y@GgWv-9OzSg5)f#W>?MJVb24mdsnZ;#bYon3#Tw+erGg%cFX&cU#2^HtG?`q!? z9@w#!lr*?629#KpKi0E;|W8OU3!JEt=okguyo(TOatanvlVuP3`W$6z;8= z<$x$IOs%&VH2!1gboIjs>aC-4E7KN!j#+wP`aIwmIK}7`-8;2qdU2*ks7q-X(P8KD zbvx9$73vJZcP4GWdY$B7-pt%h&@X3aEA4im!M_u*ECKwFj8UK8qmwz8H!{rDQr>?^ z`XO)*h#IV5y_k~XhiVxYnM*fqFpa{_SV=JJq?9j+oSN=QO*Ul7p4=C{=fhXzZno+c z(KuYuHB@zXwcL<*%RuxK&U~bLl6GmWB&{pAUp@-pau?H*Q;P+?FYR2v<9} z2!vMWYoHwu7|2aFZ8gd^umbD_WJhwPQLFj_&_lsK5oF_yOsXyd3^Y6Pux?907pVS@ zttNlZ{=GHC(nYPQT{ye6=!r?>iqP0*LAXz`*TlKgQ+;y?p~&{8lHd$OzHCsj9zYD~ zT#0~bqWY)1lzhNj5Ry52?bW@tT*XHJ?SZ=9I>2hYdbsB+@}T!K2M)!YEcT6l!0r@L z*{j-0JCse_8uNiZwBmHqrIZe!7(Mvdt;}Fy@2Qcw+V3!T=$i*xSsm9bVF-@bi1^t| z6GORQ^W2zPubA;(yzX{Bkp3~jY#)6RK2{ks1RBu-p!xay?%bEG_(h-;)(dE4y&i*c zQd!96wo_;3Wh_LhCZGa#iiIaMOu(^N((<=>!#N8pi6}vZ}BqG!g9yXjjF2 z_~~uNg|*zOT?>UG_|lkL%7FoEaxKD3Xff8rFeh^;=25gPc9-Y9txAUEon;ix^M^iI4;gBhmp1giTc_yO&xq@nOxskoA0Y)yy^>xvMtr6~}hwsF@MsHr)^!Km%{OQ&0 zmaakf51>ykrCzTpf0*kHfP$~fnUEapJ)_>an7qH`^tp=Y^&ks(bl@2v5_$?48A;rBz${?ExUF#I{IYf$xy7xPz%3 zPXNVsdP_@~;)dgIGA#Y5n1hx=-qc@sSaR7f-)2Wend6R^c3SR*k}HEQOhjc`#RR3g zg)0Q8=&L_i7@zP0$|cpyvp@+q9j#WNtGN;>vzGi|u}&0UN9E4)6!C5Iqz zpJfwpIdy`()_?%t)Kl-hc#34_Ys%LO<^Ebfuqs~L{~VcZ`(xoKlrOGiUm^$qj8Ufa z*80_If2C5fCka`db z($vZD@zoSJo%rr1g9;Es=uTgq)T8I@4MBGhr%rG*D5vW`8y}4nq9o>y2OW`z7yn~_8uGbpJrG$@Ys0}<-3E)jG{d0Bds_POyAz3 ze#c#VpGWb+faBW-wD{1`ch!MXz>X58HvT1JcBODV+hlZ2OF2e_;oa?Upu^(H*TGx? 
z)wlb0L=Zd!+)_%YEYK ztaWzvd0{*0!$U;~bP(S8*#M|KhiWu^C2ex4=UMToNe>Zo#a~3|?(msg3JfHu;3U98 z3vu18^g$6+ALr;j(bvaG?Nv9?zR`rtHj_fuQ!g?`=Z)*oH^$Qk8vOXUh8zdyg$vw% z6x?nZ`RYDA=}RCh?RcQ2xlc+0~FiB(g1a+5%D&_os*zq1+1XcV(*E z{3?`qYubxD{6XI&&>->vB?P~10DoIvQj$-;%hZb2s(xwXX8Somj)}cs)L(&`Xs5e+ z$Hd|%>1$i+pzDH*nx~U>WN@3Ac_3pW%O}Y5(&j55v`DfVl>B@#3yvJ#@Q2tbMXMzf zb7F-ex*RhuC&^$NCj!+A4d80mQjvlHZ^}L+#;`HsJoWcEVd>pLplYEZ>|qrivs;-Y zCl|N{N{9Nab#3<$J_c)SP|NN3#NON^Nj-rf?_@w<(VRuN!g}6-de#;z>gxQnToO3m z^>E3#2leWAh+Vu;(@+Dw0C(SfX_NvRnSCJhO^2R?t=AR^{s)pykve!GtGYHA#mfMD zASuPWN_`6*@T}-l0}W${j7&&5L&nv;I!Aqd9w^3s>uSMbU-4{?Q)Y znI^n*1;&bWu0c~&`f%Kv_ewpnaPd$xsU`ii7qKjGA9!YCpuDDqSpzt%PW7Ho z<$WJMO&`&EbH=1iVdlEXD*}4)U{G~>G1XL46&memDf0BNGO9!-*?FbF)IKb9w33A_ zphc>hXcQLzoiLTKB_-Na5jEHaG(vnPE0^?f^*9IH@8iwLXA^Vf*KONeLQ)M#A2%1T z2O%n4=*e#M>AdyVR44$Vx2u+^jN>ehJd?smb#Fakr zru$xOxA`00$Xx`1nHN_ zz}`9_XdXF5x^s#|Pu3XNm{ny`fR1mx3Qxn~roiKmJYW~prMt5t?X*X4g6%^Xn1&o_ z!oa=GSd=3)a;jgg{^j4eBb~5qK%gex#w#ZFYza=e`BJ^~sM0HO*S<$X*rg2RsL?)? zaVvINE#F9+8RaIws*ks$<7!__+r~|+y_kOJ!*@X?IpUJIaHZyW@D{fsld zxNM-4DpyoPo@bAs>LCeprnnLzEUk&u!tuRFUW;(4LJq>|0^dfzXcEEo=2{JvB>{0mji?tYymxQm)t`! zTAkH@vW!vFuv*8nAuhR?@;<+vpPr3htVoH?=0NWrX59l0ROoAwo}HB)BQ;k;zD&rU zP%_(0<5gLR6H8Ow^gevL*RX25nMzX6%^eYnfB^A5>yc)7#_>I)@)A0w%HqV7fI>uP z6n}$ZUJ;iG_A8H2SM|9`7x30aA@?83OfD5|z4b>8Ue7THk!~=V9qG%cX3&{HmDGoB~*rQP*~oWrGCGhMNE*ROw@s^-ELxYn5HTd;503kSC5w64i6m0K9Yn0{a_Azdk~w0 zysc`+?Q-`2;d!9!a-b?T$R)&FeZY_~BMvz8RX*gda}cH6kY=_m3f4?jeN&M{M9UCj z4bI!{xF{MBnzCtxn zvH%H;v1}=b3|CtHPs5dIhKSZ2N4M>?g2#gI-yh_2Kpp^&oM2J>IX zMD+R}R;v+UGdu~+KTK5{29vlmLh8>X%U2QMN435$I?s*l&RkyBE%(&=$ak51c{xkk zThap&9&Qkd9P{*g!AN#i=B=SL-5YNjDAD;(zcz|3MSAdAC@B7*{oRS~aScdfx;GXr z@Kc4&w*fOYnX>m&CJ2%Ft2Ea}rwHtLKot#}mEBCooL)6&_wc_XX|N-!v1RYx5^2BD zP#(GvUDe}s69aU(L(91OCs(r$wvel8F6%?3UnX}X1pY(T^li3~&to1=F1;W7&j6W9 zKmPHq-B81K#Dkj}R4b;H4Lr}ACnshZD7b*j z51sX6W~9>L)X(UHdnLYQ33eHe9LdIGMh4!c=Kn+S&6dk5p9MpQ@j~3!b3p!{O_(oj zsSI#^Nu}T0dvTRk7M2TQ=G$|xL%bTR!z(TnX^%cW?Dll>IN&`gv3^}OM)z_lKdnzM zi(Po_raH23W+%6dWqYduRs-OeD+Ef=aP_#b9Yy|2#3?w-2{v~YoY9_>q4q2Fa* z>YS2v_gGm~Ro$G%4PdRwK}O}>H+uKCt8FPHl!y>a^Kr{YWrb(NJk+KL70`yr*eO4p zp!0OkGEEPI#U!W#{gs=lx}Iuw%O|KAJ7xdf5`s#7*75lMw&dfpa3HpFT!Hon8Vv1y z0#5T-Q=QcGk?JF@j#E1JY9;&ms(y!B3fXFLw)9b8!h_^epjDD@eVwW8+5)6p0_?8& zTUln!(!a(Vyq#r;^jNPFJh;}?hhY=5+SurZm;qU~zik+pcQSuQuSPJgg3ltFnX|87iau!(%b~=6#{pDR=|?(EHn^*(8yA>lHe;6Z3^Rus@f!T&#A`Pt=|iW zQyxTLL1@`gwwlYcCR9`SmA9d~p1fVday0{4<}f_5XYnV?j2YKKsp3DZB!{w)DESL? 
z_C$f=o}h_Nb0F~{5{nlfFgCRad`Ebs-`_ma9*{A_{toF6%3`x$X1!jS0<^Ft z6Ym0+&DWoN-5s&z#M|$Y`OV&|yd1CRkL^dbxO8+J&hLCOb`6_s}Ew$lF0>+Hj-oPex&-=nlQn%*W93 zk-8I|5Y@8lzl21Aqd8mf8_jNUZkB@eQsF57-cD-#>fhy3s z+BOWh!9ap19B6+nxrBU&RJcNQ)%Bmk&!g0IfaF_SuY23nz3v5YXbKS@wdHIY$--GN z*|XA?^PE;ttr}OBho81=qG2~ z^y|acDs28QiAmo;K!cx{injE&`uwA5?^DqG;?uKI%xY#p{%mEFZ3!3f^WFCU8)a#) zEdfZ^$2gzIP$Zz2UQi+hRC$&oRJ1acLVK$(Ob#iRjJ8b$N+76*FLo)!ZztBH>cTk& zxl~Pg%YOfk8KT}BgJkczl`LE5wkc8y8gj&jyl>*X5E_%P#$U zYn9HDj5her|MFYnPsELXdbofzmyE5!4{e!kAatjO%Xe^!>_A^^NzEeAkmroZMxZ`; zJ~XvhK-~qJd9#SYgBxmv*bN(s+;Yelo`EvWjxSWz5+*vPY67--Vp;w@=3!*xfvjTIi|9E` zAY;NfD>bp^T5Tq`=s)qJJia->;xsYb+avrt4JJ1XCx!Q{D30=7e}jLr4NgLM=Q4Og zrKBszTk4sU*f_6`04+gtRI^EpN;G*PkG39r(JePu1Xp9F@vq+Ln7JN|tofkY< zhb)y(KTZViKr$yuKy!nlr%^~pVLYz+%yB9z;7#oI9x-J^?xoj-kH?@NMmm_@EQ`wa zO@!slxhkdbt=P@+>%~LwhDL_SE_%F49e;_rh+$bT(Q*uPe5FJ6qHA^=9t4e zr$jCQz3@Gc^m@98d>3XL5QNE0z+CFeoF<}5fLg%2Jb20X7sLd@QOMs&G?c74m#L?(A0DeUh&V_UEjQP4W2Q<-PP2BA-;^ zHAl+?N6!%+OP=46yNx8Oblop4N{90h!2#|KFJZ57d@4_Z)!V6W;W|ND&`(^R8 z*Qd6CX04x93A=x#xnLD`h=Ra@g}D`mM(f~*5HqI#OeE)I{$NS&fzy|h(ZIwVV3`GO z@;q)JC~3{!WaG|KqOd32_$&n!7~Ky?mzGN(ytl0y!FZ;&s)`d`VGD6jTKqM#TV{RJ zo4!GdxrYE1)<&-P$~rx(kmY%8K94W|^72%;Ke^CX=3j>jW9j4VwR0d%lj`INQSAZi zNnNhR4c)d|1TqG09&0(OEB!beWW<_1aP0{st2?W#GGPUrHDZ!M6F!(z`}M{TMQ_Kc zfMQN@;Ld5Nzn^E?((civs<_l~bw_{w^vF*XG2=fG0gfWz+Fo>J%?B>5J1kF~0j$D@ zhSehfC!S=Jd!0Z%LWy1Eon2#GKh46yWv#K?TO0M+w)jwuqGsR&IGSIC15=^1!A*W# zRF%^)9q9+H{=MJkm)eaN$g4RBYxyr8eTNHqqF8Pi>!F35)WiU|#qHR?oa~LDHaSV% z_~lnH-PLz|JBrT;!JxC8=kma1lJ|m!Ao+e^r=G*O^sih?%A1ft(lPjyQbSxPxYF(9{}68^D$AKAEb8r6shl zOyv!jf=k2}nWS?QQ1|4b|4E0~YPG8NOKrD*D!qSlcI_x8SV8J+ne<>PeL-$s+jcEi zLbM()3g(2)pok6vTM!w;fJI+)uFfRX`UF5o4wkPu0|29RxZ5m)o z{xHU@`l@9O<#>&2Q4vd8cEf-d%s%$*h}@EM9*nz<&#(<6ptc>(UvyDg!=~<-zo(@V zc0BYYrB=_*xMX0x82{yYd;}y~ZqMQ5#+CG|CU z^~m3&7$c~kJ1x4#17rSGc-w4~=&KFJK1I1yS&NKo;>r(1Y=--|-g~|ggY&8;NB$O2 zrBzmc?Qy&a4B$mj`TYAkemNw*No|<~yVqTdHR#i*JQ>aRUEL_E4WMY$zZ3-uj2aHD|yidayH zp$1KvsjVxW8c%(tXIe&oa@nhEHxrxTkx=v#asMnJ}8TMueJDDPbD^i`L6 z_i*a%DNd2qddZR3sk>3OZyOZ{GzDRLC**cd)*njdDK2?$Z~)^&I&#}Tnx3wa|gjs2`9i~30&OX^d{^-8~D0G2X7u~I!IU+MC6OIiTx!Ym%6x&T_ z>O}<9Y7&LjzuEu=dw9>Y@XC%0uuZ(%sbGpQwlZ zDf@3UqDoI5%n}MB!QKt`@EGf(6)e_VKYD{fMgAHX`w;XzB(Ozc-N3}!*A7!rhhXLN zT7gbDRRY7qdKgd6w7nGNJ};k9BoGPOAoINb^T7G*89Mi>mC2Nh1Y*?sX~J{K;j{*X ztisnhM}z4nkADp$=0*ihS=~6A;f~T7Fm35s8|o7wnlz`*X-mCY^BZi`J{$n=f&Y_O zYU>ox>HkETe7gfkH=F_Ldg*X(@qwYO_Uj1Uo*Zdd%W{v5`7f#bfsE#JzEcl;E3WUz z*Gl;eZEi*2b@oNB0UiKuYabbd^Q{AM{>%0qAOsx13j{xI(g$n?&^QM)`(5+klCWZHn=Ks6>~#_9f$%;*tvPHhDGP)F z1^vq>Io?U^PvvxJi9qK;?zfc)adk@uKx;_(d>KX%vM*59#Fm%DNyXs`utbFB9LltT zwcRe@H@AuvTi5VQioh~1L*YlV2!74{S`l9OXLu>dfzUDmy_s#sfwF z51@Ar|(DZMP6XWCKElCB&7j-x2G97hYPxSS-;(yd^@fGT<_YlYN@5-H*M0aHV1i;LRy(H1v9hXjKDlz-`b+dM=2v8E&Ht!UiL)eUE}SQ1PNp7NKoIV5pNoL(V{}3k zH0i2=Y>ce;%ZE3-V|$H_{Nig9Y5SU!bEwJg+9idL7(WYwb)NbYTzzvFID^j6ZZN^5 zKojy&0chg2n`8#27wI_5XmW^x!cLWv54#cw-J|bI5#a%}u>|L2asa7-ka;v=uH!QM z^U|Z%gWOCD!Arm!tSJZ;R$(MCB*g_v`!lPqW)#-MI8+6UE8jWL#|Zx!K{sH9O9j`V z%OmE{|7{G6GXZ0|`OBAJ7`Z<$#WM~EG3ENM8QPQoLb(4qKJDvFj$S&<#sdcVXl=SF zfo+sLf;8O*7$6$oBT@wa*)45|-D435-+nw?I-G_KxT$p`Eyv=Ki>}UelL}%|BCdEnFuI0rHdErexbtt z0RC#rW&RGtX|f`dgShl(jsNg8s8a4PCEs(Cz|3mt-```P(@MXDS5(5{imZ|ZK^S1l zk#5d5v>j{o)Bxjsm`|YmKLbVNpYBQj(+_RFBO7EcsijkU337BoP4}d7;7<4@n>W98 z+!F;H&)Jy=a`=S!sb6n>4L;{o9kKIeP&aUU$GK8%pm;+Ka8ui?2Y=R_zHrapcNy?x zk}#ks!_UjWz2sb9mVXksb@s>d*T9XaK(Szry=V9z$v;=vusnG$*VoE*(V#ORP@dPY@{g zz;Bn6_mbx`3Tl9j-+e~~Uq;7n2ky8B9>)Q!owZ`@tYgDtTlx0`_d73r_326F?ArTF zwaY?*3qU}NDYZQM0(4JGYyVffH2?bTzOUuVy4|E$};JVapsJ;!!X0C5^I-etL! 
zeKrEPd6o^dO)dWaap1ufz8d!KqFwo~_`m#02DW0R|NJe#Jbmq6hwroMH`}o%*6;wA zX9H_KaE6+%V0ozJ*7I#L6_ZWouWnu&_6Z@jz|mVOy9|WE$sNEQ>|NoVh5Ip9((_p>2D6-p{-ki z$Gb3I&jkh7vGpmFZ`=@eJ}XjmWx9Bfm>?rck|u6+5YVbJ7^Ck zSm`FgW1IJCL_W33NSC^`Sj9tsN9o-tpb-kjz_@$~6s=ws3mmuf2l?|%W#hi9r8mxH z{E3bI_~GHY-=Jtpc@K;fwGZE(DwO!A*t}%_B@NvEs(<}0_%Ird$&-r(_NW_$tpe3Q z(m+EPszFJf{p9yzfqln~=KXsLeSK@U-`OJ}b84L&C&G~$nu{RMk-#h-b`^9m#`U7x-2MvpXb7+@OL zu6ygE!r#2zWee2run%}ZhO_I^;J*R?PjEl}{mZ%LPej$V+cmYvxF>IYb{@F-xAfU+ z$&V2I_jPTyX=dRJK(a3~8n z?h*R3>iyKIF-?y=|JGlV|8V%nYU%Sg{$K7fl|Dab`d$s*pT9JPKm1SqS2Gv5y&l-! zTdiSt`p@skRi-!nR{|T|K0sp^*qP|x+E}#xlJ=zL&?U=f8XoU44)K2JtJM*|9=P~< p)vd6%(Uyxr1;!{q4}q- Production: mlflow_production
- Staging: mlflow_staging
- Archived: mlflow_archived
- None: mlflow_none | diff --git a/metadata-ingestion/docs/sources/mlflow/mlflow_recipe.yml b/metadata-ingestion/docs/sources/mlflow/mlflow_recipe.yml new file mode 100644 index 00000000000000..e40be543466294 --- /dev/null +++ b/metadata-ingestion/docs/sources/mlflow/mlflow_recipe.yml @@ -0,0 +1,8 @@ +source: + type: mlflow + config: + # Coordinates + tracking_uri: tracking_uri + +sink: + # sink configs diff --git a/metadata-ingestion/setup.py b/metadata-ingestion/setup.py index 80e6950dc5ace5..65deadf16a5b33 100644 --- a/metadata-ingestion/setup.py +++ b/metadata-ingestion/setup.py @@ -344,6 +344,7 @@ def get_long_description(): "looker": looker_common, "lookml": looker_common, "metabase": {"requests"} | sqllineage_lib, + "mlflow": {"mlflow-skinny>=2.3.0"}, "mode": {"requests", "tenacity>=8.0.1"} | sqllineage_lib, "mongodb": {"pymongo[srv]>=3.11", "packaging"}, "mssql": sql_common | {"sqlalchemy-pytds>=0.3"}, @@ -477,6 +478,7 @@ def get_long_description(): "elasticsearch", "feast" if sys.version_info >= (3, 8) else None, "iceberg" if sys.version_info >= (3, 8) else None, + "mlflow" if sys.version_info >= (3, 8) else None, "json-schema", "ldap", "looker", @@ -577,6 +579,7 @@ def get_long_description(): "lookml = datahub.ingestion.source.looker.lookml_source:LookMLSource", "datahub-lineage-file = datahub.ingestion.source.metadata.lineage:LineageFileSource", "datahub-business-glossary = datahub.ingestion.source.metadata.business_glossary:BusinessGlossaryFileSource", + "mlflow = datahub.ingestion.source.mlflow:MLflowSource", "mode = datahub.ingestion.source.mode:ModeSource", "mongodb = datahub.ingestion.source.mongodb:MongoDBSource", "mssql = datahub.ingestion.source.sql.mssql:SQLServerSource", diff --git a/metadata-ingestion/src/datahub/ingestion/source/mlflow.py b/metadata-ingestion/src/datahub/ingestion/source/mlflow.py new file mode 100644 index 00000000000000..0668defe7b0c6c --- /dev/null +++ b/metadata-ingestion/src/datahub/ingestion/source/mlflow.py @@ -0,0 +1,321 @@ +import sys + +if sys.version_info < (3, 8): + raise ImportError("MLflow is only supported on Python 3.8+") + + +from dataclasses import dataclass +from typing import Any, Callable, Iterable, Optional, TypeVar, Union + +from mlflow import MlflowClient +from mlflow.entities import Run +from mlflow.entities.model_registry import ModelVersion, RegisteredModel +from mlflow.store.entities import PagedList +from pydantic.fields import Field + +import datahub.emitter.mce_builder as builder +from datahub.configuration.source_common import EnvConfigMixin +from datahub.emitter.mcp import MetadataChangeProposalWrapper +from datahub.ingestion.api.common import PipelineContext +from datahub.ingestion.api.decorators import ( + SupportStatus, + capability, + config_class, + platform_name, + support_status, +) +from datahub.ingestion.api.source import Source, SourceCapability, SourceReport +from datahub.ingestion.api.workunit import MetadataWorkUnit +from datahub.metadata.schema_classes import ( + GlobalTagsClass, + MLHyperParamClass, + MLMetricClass, + MLModelGroupPropertiesClass, + MLModelPropertiesClass, + TagAssociationClass, + TagPropertiesClass, + VersionTagClass, + _Aspect, +) + +T = TypeVar("T") + + +class MLflowConfig(EnvConfigMixin): + tracking_uri: Optional[str] = Field( + default=None, + description="Tracking server URI. 
If not set, an MLflow default tracking_uri is used (local `mlruns/` directory or `MLFLOW_TRACKING_URI` environment variable)", + ) + registry_uri: Optional[str] = Field( + default=None, + description="Registry server URI. If not set, an MLflow default registry_uri is used (value of tracking_uri or `MLFLOW_REGISTRY_URI` environment variable)", + ) + model_name_separator: str = Field( + default="_", + description="A string which separates model name from its version (e.g. model_1 or model-1)", + ) + + +@dataclass +class MLflowRegisteredModelStageInfo: + name: str + description: str + color_hex: str + + +@platform_name("MLflow") +@config_class(MLflowConfig) +@support_status(SupportStatus.TESTING) +@capability( + SourceCapability.DESCRIPTIONS, + "Extract descriptions for MLflow Registered Models and Model Versions", +) +@capability(SourceCapability.TAGS, "Extract tags for MLflow Registered Model Stages") +class MLflowSource(Source): + platform = "mlflow" + registered_model_stages_info = ( + MLflowRegisteredModelStageInfo( + name="Production", + description="Production Stage for an ML model in MLflow Model Registry", + color_hex="#308613", + ), + MLflowRegisteredModelStageInfo( + name="Staging", + description="Staging Stage for an ML model in MLflow Model Registry", + color_hex="#FACB66", + ), + MLflowRegisteredModelStageInfo( + name="Archived", + description="Archived Stage for an ML model in MLflow Model Registry", + color_hex="#5D7283", + ), + MLflowRegisteredModelStageInfo( + name="None", + description="None Stage for an ML model in MLflow Model Registry", + color_hex="#F2F4F5", + ), + ) + + def __init__(self, ctx: PipelineContext, config: MLflowConfig): + super().__init__(ctx) + self.config = config + self.report = SourceReport() + self.client = MlflowClient( + tracking_uri=self.config.tracking_uri, + registry_uri=self.config.registry_uri, + ) + + def get_report(self) -> SourceReport: + return self.report + + def get_workunits_internal(self) -> Iterable[MetadataWorkUnit]: + yield from self._get_tags_workunits() + yield from self._get_ml_model_workunits() + + def _get_tags_workunits(self) -> Iterable[MetadataWorkUnit]: + """ + Create tags for each Stage in MLflow Model Registry. + """ + for stage_info in self.registered_model_stages_info: + tag_urn = self._make_stage_tag_urn(stage_info.name) + tag_properties = TagPropertiesClass( + name=self._make_stage_tag_name(stage_info.name), + description=stage_info.description, + colorHex=stage_info.color_hex, + ) + wu = self._create_workunit(urn=tag_urn, aspect=tag_properties) + yield wu + + def _make_stage_tag_urn(self, stage_name: str) -> str: + tag_name = self._make_stage_tag_name(stage_name) + tag_urn = builder.make_tag_urn(tag_name) + return tag_urn + + def _make_stage_tag_name(self, stage_name: str) -> str: + return f"{self.platform}_{stage_name.lower()}" + + def _create_workunit(self, urn: str, aspect: _Aspect) -> MetadataWorkUnit: + """ + Utility to create an MCP workunit. + """ + return MetadataChangeProposalWrapper( + entityUrn=urn, + aspect=aspect, + ).as_workunit() + + def _get_ml_model_workunits(self) -> Iterable[MetadataWorkUnit]: + """ + Traverse each Registered Model in Model Registry and generate a corresponding workunit. 
+ """ + registered_models = self._get_mlflow_registered_models() + for registered_model in registered_models: + yield self._get_ml_group_workunit(registered_model) + model_versions = self._get_mlflow_model_versions(registered_model) + for model_version in model_versions: + run = self._get_mlflow_run(model_version) + yield self._get_ml_model_properties_workunit( + registered_model=registered_model, + model_version=model_version, + run=run, + ) + yield self._get_global_tags_workunit(model_version=model_version) + + def _get_mlflow_registered_models(self) -> Iterable[RegisteredModel]: + """ + Get all Registered Models in MLflow Model Registry. + """ + registered_models: Iterable[ + RegisteredModel + ] = self._traverse_mlflow_search_func( + search_func=self.client.search_registered_models, + ) + return registered_models + + @staticmethod + def _traverse_mlflow_search_func( + search_func: Callable[..., PagedList[T]], + **kwargs: Any, + ) -> Iterable[T]: + """ + Utility to traverse an MLflow search_* functions which return PagedList. + """ + next_page_token = None + while True: + paged_list = search_func(page_token=next_page_token, **kwargs) + yield from paged_list.to_list() + next_page_token = paged_list.token + if not next_page_token: + return + + def _get_ml_group_workunit( + self, + registered_model: RegisteredModel, + ) -> MetadataWorkUnit: + """ + Generate an MLModelGroup workunit for an MLflow Registered Model. + """ + ml_model_group_urn = self._make_ml_model_group_urn(registered_model) + ml_model_group_properties = MLModelGroupPropertiesClass( + customProperties=registered_model.tags, + description=registered_model.description, + createdAt=registered_model.creation_timestamp, + ) + wu = self._create_workunit( + urn=ml_model_group_urn, + aspect=ml_model_group_properties, + ) + return wu + + def _make_ml_model_group_urn(self, registered_model: RegisteredModel) -> str: + urn = builder.make_ml_model_group_urn( + platform=self.platform, + group_name=registered_model.name, + env=self.config.env, + ) + return urn + + def _get_mlflow_model_versions( + self, + registered_model: RegisteredModel, + ) -> Iterable[ModelVersion]: + """ + Get all Model Versions for each Registered Model. + """ + filter_string = f"name = '{registered_model.name}'" + model_versions: Iterable[ModelVersion] = self._traverse_mlflow_search_func( + search_func=self.client.search_model_versions, + filter_string=filter_string, + ) + return model_versions + + def _get_mlflow_run(self, model_version: ModelVersion) -> Union[None, Run]: + """ + Get a Run associated with a Model Version. Some MVs may exist without Run. + """ + if model_version.run_id: + run = self.client.get_run(model_version.run_id) + return run + else: + return None + + def _get_ml_model_properties_workunit( + self, + registered_model: RegisteredModel, + model_version: ModelVersion, + run: Union[None, Run], + ) -> MetadataWorkUnit: + """ + Generate an MLModel workunit for an MLflow Model Version. + Every Model Version is a DataHub MLModel entity associated with an MLModelGroup corresponding to a Registered Model. + If a model was registered without an associated Run then hyperparams and metrics are not available. 
+ """ + ml_model_group_urn = self._make_ml_model_group_urn(registered_model) + ml_model_urn = self._make_ml_model_urn(model_version) + if run: + hyperparams = [ + MLHyperParamClass(name=k, value=str(v)) + for k, v in run.data.params.items() + ] + training_metrics = [ + MLMetricClass(name=k, value=str(v)) for k, v in run.data.metrics.items() + ] + else: + hyperparams = None + training_metrics = None + ml_model_properties = MLModelPropertiesClass( + customProperties=model_version.tags, + externalUrl=self._make_external_url(model_version), + description=model_version.description, + date=model_version.creation_timestamp, + version=VersionTagClass(versionTag=str(model_version.version)), + hyperParams=hyperparams, + trainingMetrics=training_metrics, + # mlflow tags are dicts, but datahub tags are lists. currently use only keys from mlflow tags + tags=list(model_version.tags.keys()), + groups=[ml_model_group_urn], + ) + wu = self._create_workunit(urn=ml_model_urn, aspect=ml_model_properties) + return wu + + def _make_ml_model_urn(self, model_version: ModelVersion) -> str: + urn = builder.make_ml_model_urn( + platform=self.platform, + model_name=f"{model_version.name}{self.config.model_name_separator}{model_version.version}", + env=self.config.env, + ) + return urn + + def _make_external_url(self, model_version: ModelVersion) -> Union[None, str]: + """ + Generate URL for a Model Version to MLflow UI. + """ + base_uri = self.client.tracking_uri + if base_uri.startswith("http"): + return f"{base_uri.rstrip('/')}/#/models/{model_version.name}/versions/{model_version.version}" + else: + return None + + def _get_global_tags_workunit( + self, + model_version: ModelVersion, + ) -> MetadataWorkUnit: + """ + Associate a Model Version Stage with a corresponding tag. 
+ """ + global_tags = GlobalTagsClass( + tags=[ + TagAssociationClass( + tag=self._make_stage_tag_urn(model_version.current_stage), + ), + ] + ) + wu = self._create_workunit( + urn=self._make_ml_model_urn(model_version), + aspect=global_tags, + ) + return wu + + @classmethod + def create(cls, config_dict: dict, ctx: PipelineContext) -> Source: + config = MLflowConfig.parse_obj(config_dict) + return cls(ctx, config) diff --git a/metadata-ingestion/tests/integration/mlflow/mlflow_mcps_golden.json b/metadata-ingestion/tests/integration/mlflow/mlflow_mcps_golden.json new file mode 100644 index 00000000000000..c70625c74d9983 --- /dev/null +++ b/metadata-ingestion/tests/integration/mlflow/mlflow_mcps_golden.json @@ -0,0 +1,238 @@ +[ +{ + "entityType": "tag", + "entityUrn": "urn:li:tag:mlflow_production", + "changeType": "UPSERT", + "aspectName": "tagProperties", + "aspect": { + "json": { + "name": "mlflow_production", + "description": "Production Stage for an ML model in MLflow Model Registry", + "colorHex": "#308613" + } + }, + "systemMetadata": { + "lastObserved": 1615443388097, + "runId": "mlflow-source-test" + } +}, +{ + "entityType": "tag", + "entityUrn": "urn:li:tag:mlflow_staging", + "changeType": "UPSERT", + "aspectName": "tagProperties", + "aspect": { + "json": { + "name": "mlflow_staging", + "description": "Staging Stage for an ML model in MLflow Model Registry", + "colorHex": "#FACB66" + } + }, + "systemMetadata": { + "lastObserved": 1615443388097, + "runId": "mlflow-source-test" + } +}, +{ + "entityType": "tag", + "entityUrn": "urn:li:tag:mlflow_archived", + "changeType": "UPSERT", + "aspectName": "tagProperties", + "aspect": { + "json": { + "name": "mlflow_archived", + "description": "Archived Stage for an ML model in MLflow Model Registry", + "colorHex": "#5D7283" + } + }, + "systemMetadata": { + "lastObserved": 1615443388097, + "runId": "mlflow-source-test" + } +}, +{ + "entityType": "tag", + "entityUrn": "urn:li:tag:mlflow_none", + "changeType": "UPSERT", + "aspectName": "tagProperties", + "aspect": { + "json": { + "name": "mlflow_none", + "description": "None Stage for an ML model in MLflow Model Registry", + "colorHex": "#F2F4F5" + } + }, + "systemMetadata": { + "lastObserved": 1615443388097, + "runId": "mlflow-source-test" + } +}, +{ + "entityType": "mlModelGroup", + "entityUrn": "urn:li:mlModelGroup:(urn:li:dataPlatform:mlflow,test-model,PROD)", + "changeType": "UPSERT", + "aspectName": "mlModelGroupProperties", + "aspect": { + "json": { + "customProperties": { + "model_env": "test", + "model_id": "1" + }, + "description": "This a test registered model", + "createdAt": 1615443388097 + } + }, + "systemMetadata": { + "lastObserved": 1615443388097, + "runId": "mlflow-source-test" + } +}, +{ + "entityType": "mlModel", + "entityUrn": "urn:li:mlModel:(urn:li:dataPlatform:mlflow,test-model_1,PROD)", + "changeType": "UPSERT", + "aspectName": "mlModelProperties", + "aspect": { + "json": { + "customProperties": { + "model_version_id": "1" + }, + "date": 1615443388097, + "version": { + "versionTag": "1" + }, + "hyperParams": [ + { + "name": "p", + "value": "1" + } + ], + "trainingMetrics": [ + { + "name": "m", + "value": "0.85" + } + ], + "tags": [ + "model_version_id" + ], + "groups": [ + "urn:li:mlModelGroup:(urn:li:dataPlatform:mlflow,test-model,PROD)" + ] + } + }, + "systemMetadata": { + "lastObserved": 1615443388097, + "runId": "mlflow-source-test" + } +}, +{ + "entityType": "mlModel", + "entityUrn": "urn:li:mlModel:(urn:li:dataPlatform:mlflow,test-model_1,PROD)", + "changeType": 
"UPSERT", + "aspectName": "globalTags", + "aspect": { + "json": { + "tags": [ + { + "tag": "urn:li:tag:mlflow_archived" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1615443388097, + "runId": "mlflow-source-test" + } +}, +{ + "entityType": "mlModel", + "entityUrn": "urn:li:mlModel:(urn:li:dataPlatform:mlflow,test-model_1,PROD)", + "changeType": "UPSERT", + "aspectName": "status", + "aspect": { + "json": { + "removed": false + } + }, + "systemMetadata": { + "lastObserved": 1615443388097, + "runId": "mlflow-source-test" + } +}, +{ + "entityType": "mlModelGroup", + "entityUrn": "urn:li:mlModelGroup:(urn:li:dataPlatform:mlflow,test-model,PROD)", + "changeType": "UPSERT", + "aspectName": "status", + "aspect": { + "json": { + "removed": false + } + }, + "systemMetadata": { + "lastObserved": 1615443388097, + "runId": "mlflow-source-test" + } +}, +{ + "entityType": "tag", + "entityUrn": "urn:li:tag:mlflow_staging", + "changeType": "UPSERT", + "aspectName": "status", + "aspect": { + "json": { + "removed": false + } + }, + "systemMetadata": { + "lastObserved": 1615443388097, + "runId": "mlflow-source-test" + } +}, +{ + "entityType": "tag", + "entityUrn": "urn:li:tag:mlflow_archived", + "changeType": "UPSERT", + "aspectName": "status", + "aspect": { + "json": { + "removed": false + } + }, + "systemMetadata": { + "lastObserved": 1615443388097, + "runId": "mlflow-source-test" + } +}, +{ + "entityType": "tag", + "entityUrn": "urn:li:tag:mlflow_production", + "changeType": "UPSERT", + "aspectName": "status", + "aspect": { + "json": { + "removed": false + } + }, + "systemMetadata": { + "lastObserved": 1615443388097, + "runId": "mlflow-source-test" + } +}, +{ + "entityType": "tag", + "entityUrn": "urn:li:tag:mlflow_none", + "changeType": "UPSERT", + "aspectName": "status", + "aspect": { + "json": { + "removed": false + } + }, + "systemMetadata": { + "lastObserved": 1615443388097, + "runId": "mlflow-source-test" + } +} +] \ No newline at end of file diff --git a/metadata-ingestion/tests/integration/mlflow/test_mlflow_source.py b/metadata-ingestion/tests/integration/mlflow/test_mlflow_source.py new file mode 100644 index 00000000000000..76af666526555a --- /dev/null +++ b/metadata-ingestion/tests/integration/mlflow/test_mlflow_source.py @@ -0,0 +1,104 @@ +import sys + +if sys.version_info >= (3, 8): + from pathlib import Path + from typing import Any, Dict, TypeVar + + import pytest + from mlflow import MlflowClient + + from datahub.ingestion.run.pipeline import Pipeline + from tests.test_helpers import mce_helpers + + T = TypeVar("T") + + @pytest.fixture + def tracking_uri(tmp_path: Path) -> str: + return str(tmp_path / "mlruns") + + @pytest.fixture + def sink_file_path(tmp_path: Path) -> str: + return str(tmp_path / "mlflow_source_mcps.json") + + @pytest.fixture + def pipeline_config(tracking_uri: str, sink_file_path: str) -> Dict[str, Any]: + source_type = "mlflow" + return { + "run_id": "mlflow-source-test", + "source": { + "type": source_type, + "config": { + "tracking_uri": tracking_uri, + }, + }, + "sink": { + "type": "file", + "config": { + "filename": sink_file_path, + }, + }, + } + + @pytest.fixture + def generate_mlflow_data(tracking_uri: str) -> None: + client = MlflowClient(tracking_uri=tracking_uri) + experiment_name = "test-experiment" + run_name = "test-run" + model_name = "test-model" + test_experiment_id = client.create_experiment(experiment_name) + test_run = client.create_run( + experiment_id=test_experiment_id, + run_name=run_name, + ) + client.log_param( + 
run_id=test_run.info.run_id, + key="p", + value=1, + ) + client.log_metric( + run_id=test_run.info.run_id, + key="m", + value=0.85, + ) + client.create_registered_model( + name=model_name, + tags=dict( + model_id=1, + model_env="test", + ), + description="This a test registered model", + ) + client.create_model_version( + name=model_name, + source="dummy_dir/dummy_file", + run_id=test_run.info.run_id, + tags=dict(model_version_id=1), + ) + client.transition_model_version_stage( + name=model_name, + version="1", + stage="Archived", + ) + + def test_ingestion( + pytestconfig, + mock_time, + sink_file_path, + pipeline_config, + generate_mlflow_data, + ): + print(f"MCPs file path: {sink_file_path}") + golden_file_path = ( + pytestconfig.rootpath / "tests/integration/mlflow/mlflow_mcps_golden.json" + ) + + pipeline = Pipeline.create(pipeline_config) + pipeline.run() + pipeline.pretty_print_summary() + pipeline.raise_from_status() + + mce_helpers.check_golden_file( + pytestconfig=pytestconfig, + output_path=sink_file_path, + golden_path=golden_file_path, + ) diff --git a/metadata-ingestion/tests/unit/test_mlflow_source.py b/metadata-ingestion/tests/unit/test_mlflow_source.py new file mode 100644 index 00000000000000..97b5afd3d6a4ef --- /dev/null +++ b/metadata-ingestion/tests/unit/test_mlflow_source.py @@ -0,0 +1,133 @@ +import sys + +if sys.version_info >= (3, 8): + import datetime + from pathlib import Path + from typing import Any, TypeVar, Union + + import pytest + from mlflow import MlflowClient + from mlflow.entities.model_registry import RegisteredModel + from mlflow.entities.model_registry.model_version import ModelVersion + from mlflow.store.entities import PagedList + + from datahub.ingestion.api.common import PipelineContext + from datahub.ingestion.source.mlflow import MLflowConfig, MLflowSource + + T = TypeVar("T") + + @pytest.fixture + def tracking_uri(tmp_path: Path) -> str: + return str(tmp_path / "mlruns") + + @pytest.fixture + def source(tracking_uri: str) -> MLflowSource: + return MLflowSource( + ctx=PipelineContext(run_id="mlflow-source-test"), + config=MLflowConfig(tracking_uri=tracking_uri), + ) + + @pytest.fixture + def registered_model(source: MLflowSource) -> RegisteredModel: + model_name = "abc" + return RegisteredModel(name=model_name) + + @pytest.fixture + def model_version( + source: MLflowSource, + registered_model: RegisteredModel, + ) -> ModelVersion: + version = "1" + return ModelVersion( + name=registered_model.name, + version=version, + creation_timestamp=datetime.datetime.now(), + ) + + def dummy_search_func(page_token: Union[None, str], **kwargs: Any) -> PagedList[T]: + dummy_pages = dict( + page_1=PagedList(items=["a", "b"], token="page_2"), + page_2=PagedList(items=["c", "d"], token="page_3"), + page_3=PagedList(items=["e"], token=None), + ) + if page_token is None: + page_to_return = dummy_pages["page_1"] + else: + page_to_return = dummy_pages[page_token] + if kwargs.get("case", "") == "upper": + page_to_return = PagedList( + items=[e.upper() for e in page_to_return.to_list()], + token=page_to_return.token, + ) + return page_to_return + + def test_stages(source): + mlflow_registered_model_stages = { + "Production", + "Staging", + "Archived", + None, + } + workunits = source._get_tags_workunits() + names = [wu.get_metadata()["metadata"].aspect.name for wu in workunits] + + assert len(names) == len(mlflow_registered_model_stages) + assert set(names) == { + "mlflow_" + str(stage).lower() for stage in mlflow_registered_model_stages + } + + def 
test_config_model_name_separator(source, model_version): + name_version_sep = "+" + source.config.model_name_separator = name_version_sep + expected_model_name = ( + f"{model_version.name}{name_version_sep}{model_version.version}" + ) + expected_urn = f"urn:li:mlModel:(urn:li:dataPlatform:mlflow,{expected_model_name},{source.config.env})" + + urn = source._make_ml_model_urn(model_version) + + assert urn == expected_urn + + def test_model_without_run(source, registered_model, model_version): + run = source._get_mlflow_run(model_version) + wu = source._get_ml_model_properties_workunit( + registered_model=registered_model, + model_version=model_version, + run=run, + ) + aspect = wu.get_metadata()["metadata"].aspect + + assert aspect.hyperParams is None + assert aspect.trainingMetrics is None + + def test_traverse_mlflow_search_func(source): + expected_items = ["a", "b", "c", "d", "e"] + + items = list(source._traverse_mlflow_search_func(dummy_search_func)) + + assert items == expected_items + + def test_traverse_mlflow_search_func_with_kwargs(source): + expected_items = ["A", "B", "C", "D", "E"] + + items = list( + source._traverse_mlflow_search_func(dummy_search_func, case="upper") + ) + + assert items == expected_items + + def test_make_external_link_local(source, model_version): + expected_url = None + + url = source._make_external_url(model_version) + + assert url == expected_url + + def test_make_external_link_remote(source, model_version): + tracking_uri_remote = "https://dummy-mlflow-tracking-server.org" + source.client = MlflowClient(tracking_uri=tracking_uri_remote) + expected_url = f"{tracking_uri_remote}/#/models/{model_version.name}/versions/{model_version.version}" + + url = source._make_external_url(model_version) + + assert url == expected_url diff --git a/metadata-service/war/src/main/resources/boot/data_platforms.json b/metadata-service/war/src/main/resources/boot/data_platforms.json index 7a7cec60aa25f0..3d956c5774dedb 100644 --- a/metadata-service/war/src/main/resources/boot/data_platforms.json +++ b/metadata-service/war/src/main/resources/boot/data_platforms.json @@ -346,6 +346,16 @@ "logoUrl": "/assets/platforms/sagemakerlogo.png" } }, + { + "urn": "urn:li:dataPlatform:mlflow", + "aspect": { + "datasetNameDelimiter": ".", + "name": "mlflow", + "displayName": "MLflow", + "type": "OTHERS", + "logoUrl": "/assets/platforms/mlflowlogo.png" + } + }, { "urn": "urn:li:dataPlatform:glue", "aspect": { From ca4dc4e3d228e0612a42e7a3e0895573ab38586b Mon Sep 17 00:00:00 2001 From: Pedro Silva Date: Tue, 26 Sep 2023 19:08:09 +0100 Subject: [PATCH 8/9] feat(docs): Update ownership-types image urls (#8905) --- docs/ownership/ownership-types.md | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/docs/ownership/ownership-types.md b/docs/ownership/ownership-types.md index 2dbefaa488140d..f1b951871a5a23 100644 --- a/docs/ownership/ownership-types.md +++ b/docs/ownership/ownership-types.md @@ -47,7 +47,7 @@ To manage a Custom Ownership type, first navigate to the DataHub Admin page:

- +

Then navigate to the `Ownership Types` tab under the `Management` section. @@ -61,7 +61,7 @@ Inside the form, you can choose a name for your Ownership Type. You can also add Don't worry, this can be changed later.

- +

Once you've chosen a name and a description, click 'Save' to create the new Ownership Type. @@ -162,13 +162,13 @@ You can assign an owner with a custom ownership type to an entity either using t On an Entity's profile page, use the right sidebar to locate the Owners section.

- +

Click 'Add Owners', select the owner you want and then search for the Custom Ownership Type you'd like to add this asset to. When you're done, click 'Add'.

- +

To remove ownership from an asset, click the 'x' icon on the Owner label. From f95d1ae8201327ad8ab143a4a1df6ac7d523bea5 Mon Sep 17 00:00:00 2001 From: Jeff Merrick Date: Tue, 26 Sep 2023 14:26:06 -0500 Subject: [PATCH 9/9] docs(website): style tweaks for readability and more open spacing (#8876) Co-authored-by: socar-dini <0327jane@gmail.com> Co-authored-by: Harshal Sheth --- docs-website/docusaurus.config.js | 30 +-- .../components/Feedback/styles.module.scss | 2 +- .../MarkpromptHelp/markprompthelp.module.scss | 2 - .../pages/_components/Hero/hero.module.scss | 23 ++ .../src/pages/_components/Hero/index.js | 13 +- .../_components/Section/section.module.scss | 1 - .../DropDownFilter/search.module.scss | 1 - .../GuideList/guidelist.module.scss | 3 + .../docs/_components/GuideList/index.jsx | 2 +- .../QuickLinkCard/quicklinkcard.module.scss | 4 + .../_components/SearchBar/search.module.scss | 1 - docs-website/src/styles/acryl.scss | 3 + docs-website/src/styles/datahub.scss | 2 + docs-website/src/styles/global.scss | 197 +++++++++++------- .../img/acryl-logo-transparent-mark.svg | 1 + 15 files changed, 174 insertions(+), 111 deletions(-) create mode 100644 docs-website/static/img/acryl-logo-transparent-mark.svg diff --git a/docs-website/docusaurus.config.js b/docs-website/docusaurus.config.js index 9bdba5f3175426..c1ecf0283cf63c 100644 --- a/docs-website/docusaurus.config.js +++ b/docs-website/docusaurus.config.js @@ -12,13 +12,11 @@ module.exports = { organizationName: "datahub-project", // Usually your GitHub org/user name. projectName: "datahub", // Usually your repo name. staticDirectories: ["static", "genStatic"], - stylesheets: [ - "https://fonts.googleapis.com/css2?family=Manrope:wght@400;600&display=swap", - ], + stylesheets: ["https://fonts.googleapis.com/css2?family=Manrope:wght@400;500;700&display=swap"], noIndex: isSaas, customFields: { isSaas: isSaas, - markpromptProjectKey: process.env.DOCUSAURUS_MARKPROMPT_PROJECT_KEY || 'IeF3CUFCUQWuouZ8MP5Np9nES52QAtaA', + markpromptProjectKey: process.env.DOCUSAURUS_MARKPROMPT_PROJECT_KEY || "IeF3CUFCUQWuouZ8MP5Np9nES52QAtaA", }, themeConfig: { ...(!isSaas && { @@ -35,12 +33,8 @@ module.exports = { title: null, logo: { alt: "DataHub Logo", - src: `img/${ - isSaas ? "acryl" : "datahub" - }-logo-color-light-horizontal.svg`, - srcDark: `img/${ - isSaas ? "acryl" : "datahub" - }-logo-color-dark-horizontal.svg`, + src: `img/${isSaas ? "acryl" : "datahub"}-logo-color-light-horizontal.svg`, + srcDark: `img/${isSaas ? "acryl" : "datahub"}-logo-color-dark-horizontal.svg`, }, items: [ { @@ -50,7 +44,8 @@ module.exports = { position: "right", }, { - href: "/integrations", + to: "/integrations", + activeBasePath: "integrations", label: "Integrations", position: "right", }, @@ -70,8 +65,8 @@ module.exports = { position: "right", }, { - type: 'docsVersionDropdown', - position: 'right', + type: "docsVersionDropdown", + position: "right", dropdownActiveClassDisabled: true, }, { @@ -201,9 +196,7 @@ module.exports = { blog: false, theme: { customCss: [ - isSaas - ? require.resolve("./src/styles/acryl.scss") - : require.resolve("./src/styles/datahub.scss"), + isSaas ? 
require.resolve("./src/styles/acryl.scss") : require.resolve("./src/styles/datahub.scss"), require.resolve("./src/styles/global.scss"), require.resolve("./src/styles/sphinx.scss"), require.resolve("./src/styles/config-table.scss"), @@ -217,10 +210,7 @@ module.exports = { ], ], plugins: [ - [ - "@docusaurus/plugin-ideal-image", - { quality: 100, sizes: [320, 640, 1280, 1440, 1600] }, - ], + ["@docusaurus/plugin-ideal-image", { quality: 100, sizes: [320, 640, 1280, 1440, 1600] }], "docusaurus-plugin-sass", [ "docusaurus-graphql-plugin", diff --git a/docs-website/src/components/Feedback/styles.module.scss b/docs-website/src/components/Feedback/styles.module.scss index b0fa3d7d1bd2bc..ee22f6b0550126 100644 --- a/docs-website/src/components/Feedback/styles.module.scss +++ b/docs-website/src/components/Feedback/styles.module.scss @@ -37,11 +37,11 @@ } .feedbackText { + font-family: var(--ifm-font-family-base); width: 100%; border: var(--ifm-hr-border-color) 1px solid; border-radius: 0.4rem; padding: 0.4rem; - font-family: "Manrope", sans-serif; } .feedbackButton { diff --git a/docs-website/src/components/MarkpromptHelp/markprompthelp.module.scss b/docs-website/src/components/MarkpromptHelp/markprompthelp.module.scss index 270877cd04a9ff..0d874cad11790f 100644 --- a/docs-website/src/components/MarkpromptHelp/markprompthelp.module.scss +++ b/docs-website/src/components/MarkpromptHelp/markprompthelp.module.scss @@ -325,7 +325,6 @@ button { padding-left: 1.5714286em; } .MarkpromptAnswer ol > li::marker { - font-weight: 400; color: var(--markprompt-foreground); } .MarkpromptAnswer ul > li::marker { @@ -454,7 +453,6 @@ button { background-color: var(--markprompt-muted); border: 1px solid var(--markprompt-border); overflow-x: auto; - font-weight: 400; font-size: 0.8571429em; line-height: 1.6666667; margin-top: 1.6666667em; diff --git a/docs-website/src/pages/_components/Hero/hero.module.scss b/docs-website/src/pages/_components/Hero/hero.module.scss index c2103bb0782bd2..6e4a623f469d51 100644 --- a/docs-website/src/pages/_components/Hero/hero.module.scss +++ b/docs-website/src/pages/_components/Hero/hero.module.scss @@ -74,3 +74,26 @@ margin-right: 0.5rem; } } + +.quickstartContent { + text-align: center; + padding: 2rem 0; + height: 100%; + margin: 2rem 0; + background: #34394d; + border-radius: var(--ifm-card-border-radius); +} + +.quickstartTitle { + color: #fafafa; +} + +.quickstartSubtitle { + font-size: 1.1rem; + color: gray; +} + +.quickstartCodeblock { + text-align: left; + padding: 0 20vh; +} diff --git a/docs-website/src/pages/_components/Hero/index.js b/docs-website/src/pages/_components/Hero/index.js index 22b406dce037ef..ffa298b27a8225 100644 --- a/docs-website/src/pages/_components/Hero/index.js +++ b/docs-website/src/pages/_components/Hero/index.js @@ -34,12 +34,11 @@ const Hero = ({}) => { complexity of your data ecosystem.

- Built with ❤️ by{" "} - {" "} + Built with ❤️ by {" "} Acryl Data {" "} - and LinkedIn. + and LinkedIn.

Get Started → @@ -51,10 +50,10 @@ const Hero = ({}) => { DataHub Flow Diagram -
-

Get Started Now

-

Run the following command to get started with DataHub.

-
+
+

Get Started Now

+

Run the following command to get started with DataHub.

+
python3 -m pip install --upgrade pip wheel setuptools
python3 -m pip install --upgrade acryl-datahub
diff --git a/docs-website/src/pages/_components/Section/section.module.scss b/docs-website/src/pages/_components/Section/section.module.scss index 4b68ce5533d4d0..7a39a60b6fa4c2 100644 --- a/docs-website/src/pages/_components/Section/section.module.scss +++ b/docs-website/src/pages/_components/Section/section.module.scss @@ -9,7 +9,6 @@ .sectionTitle { font-size: 2.5rem; margin-bottom: 3rem; - font-weight: normal; text-align: center; } diff --git a/docs-website/src/pages/docs/_components/DropDownFilter/search.module.scss b/docs-website/src/pages/docs/_components/DropDownFilter/search.module.scss index 17e5f224906646..2ae0f5c849ba92 100644 --- a/docs-website/src/pages/docs/_components/DropDownFilter/search.module.scss +++ b/docs-website/src/pages/docs/_components/DropDownFilter/search.module.scss @@ -74,7 +74,6 @@ } .searchResultItemHeading { - font-weight: 400; margin-bottom: 0; } diff --git a/docs-website/src/pages/docs/_components/GuideList/guidelist.module.scss b/docs-website/src/pages/docs/_components/GuideList/guidelist.module.scss index a8f279e74ef873..46b1b01408592a 100644 --- a/docs-website/src/pages/docs/_components/GuideList/guidelist.module.scss +++ b/docs-website/src/pages/docs/_components/GuideList/guidelist.module.scss @@ -16,6 +16,9 @@ display: block; margin-bottom: 0.25rem; } + strong { + font-weight: 600; + } span { font-size: 0.875rem; line-height: 1.25em; diff --git a/docs-website/src/pages/docs/_components/GuideList/index.jsx b/docs-website/src/pages/docs/_components/GuideList/index.jsx index 3a47e1691aeeae..9d4b50b5f5159c 100644 --- a/docs-website/src/pages/docs/_components/GuideList/index.jsx +++ b/docs-website/src/pages/docs/_components/GuideList/index.jsx @@ -19,7 +19,7 @@ const GuideList = ({ title, content, seeMoreLink }) => content?.length > 0 ? (
-

{title}

+

{title}

{content.map((props, idx) => ( diff --git a/docs-website/src/pages/docs/_components/QuickLinkCard/quicklinkcard.module.scss b/docs-website/src/pages/docs/_components/QuickLinkCard/quicklinkcard.module.scss index cf239ff8643ba8..4fbbc4583d6629 100644 --- a/docs-website/src/pages/docs/_components/QuickLinkCard/quicklinkcard.module.scss +++ b/docs-website/src/pages/docs/_components/QuickLinkCard/quicklinkcard.module.scss @@ -20,6 +20,10 @@ display: block; margin-bottom: 0.25rem; } + strong { + font-weight: 600; + } + span { font-size: 0.875rem; line-height: 1.25em; diff --git a/docs-website/src/pages/docs/_components/SearchBar/search.module.scss b/docs-website/src/pages/docs/_components/SearchBar/search.module.scss index 6faaf19c7e6031..d85607b08e4e75 100644 --- a/docs-website/src/pages/docs/_components/SearchBar/search.module.scss +++ b/docs-website/src/pages/docs/_components/SearchBar/search.module.scss @@ -91,7 +91,6 @@ } .searchResultItemHeading { - font-weight: 400; margin-bottom: 0; } diff --git a/docs-website/src/styles/acryl.scss b/docs-website/src/styles/acryl.scss index 8eb9b375830bf8..8bb25ca28cb38b 100644 --- a/docs-website/src/styles/acryl.scss +++ b/docs-website/src/styles/acryl.scss @@ -7,4 +7,7 @@ --ifm-color-primary-light: #13beb0; --ifm-color-primary-lighter: #14c7b8; --ifm-color-primary-lightest: #16e1d0; + + // Custom + --ifm-color-primary-opaque: rgba(17, 173, 160, 0.1); } diff --git a/docs-website/src/styles/datahub.scss b/docs-website/src/styles/datahub.scss index a41359c24b833a..8d8f0bdd6daa90 100644 --- a/docs-website/src/styles/datahub.scss +++ b/docs-website/src/styles/datahub.scss @@ -7,4 +7,6 @@ --ifm-color-primary-light: #349dff; --ifm-color-primary-lighter: #42a4ff; --ifm-color-primary-lightest: #6cb8ff; + + --ifm-color-primary-opaque: rgba(24, 144, 255, 0.1); } diff --git a/docs-website/src/styles/global.scss b/docs-website/src/styles/global.scss index 013e9fb9f0d9a8..55a54876b41acd 100644 --- a/docs-website/src/styles/global.scss +++ b/docs-website/src/styles/global.scss @@ -7,16 +7,23 @@ /* You can override the default Infima variables here. 
*/ :root { - font-family: "Manrope", sans-serif; + // Global --ifm-background-color: #ffffff; + --ifm-global-spacing: 1rem; /* Typography */ - --ifm-heading-font-weight: 600; - --ifm-font-weight-semibold: 600; + --ifm-font-size-base: 95%; + --ifm-heading-font-weight: 700; --ifm-code-font-size: 0.9em; --ifm-heading-color: #000000; + --ifm-heading-font-family: "Manrope", sans-serif; + --ifm-font-family-base: system-ui, -apple-system, BlinkMacSystemFont, "Segoe UI", Helvetica, Arial, sans-serif, "Apple Color Emoji", + "Segoe UI Emoji", "Segoe UI Symbol"; + --ifm-font-family-monospace: SFMono-Regular, Menlo, Monaco, Consolas, "Liberation Mono", "Courier New", monospace; + /* Buttons */ --ifm-button-border-radius: 1000em; + --ifm-button-font-weight: 600; /* Navbar */ --ifm-navbar-background-color: var(--ifm-background-color); @@ -30,6 +37,7 @@ --ifm-hr-border-width: 1px 0 0 0; --ifm-hr-border-color: #e3e3e3; --ifm-hr-background-color: #e3e3e3; + /* More Colors */ --ifm-hero-background-color: var(--ifm-background-color); --ifm-background-surface-color: #fafafa; @@ -37,6 +45,19 @@ /* Cards */ --ifm-card-background-color: --ifm-background-color; --ifm-card-border-radius: calc(var(--ifm-global-radius) * 1.5); + + /* Menu */ + --ifm-menu-link-padding-vertical: 0.6rem; + --ifm-menu-link-padding-horizontal: 1rem; + --ifm-menu-link-sublist-icon: url('data:image/svg+xml;utf8,'); + --ifm-menu-color-background-hover: var(--ifm-color-primary-opaque); + --ifm-menu-color-background-active: var(--ifm-color-primary-opaque); + + /* TOC */ + --ifm-toc-padding-vertical: 1.25rem; + + /* Breadcrumbs */ + --ifm-breadcrumb-item-background-active: var(--ifm-color-primary-opaque); } html[data-theme="dark"] { @@ -49,16 +70,29 @@ html[data-theme="dark"] { .button--primary { color: #fff; } + + .invert-on-dark { + filter: invert(1); + } +} + +/* Main Docs Content Area */ + +main { + padding-top: 1rem; } -h1 { - font-weight: 400; +.markdown, +main > h1 { + margin-top: 1rem; } -html[data-theme="dark"] .invert-on-dark { - filter: invert(1); +[class*="docItemCol"] { + padding: 0 2rem; } +/* Custom Utility */ + .row--centered { align-items: center; } @@ -67,6 +101,8 @@ html[data-theme="dark"] .invert-on-dark { padding: 5vh 0; } +/* Announcement Bar */ + div[class^="announcementBar"] { z-index: calc(var(--ifm-z-index-fixed) - 1); div { @@ -106,6 +142,8 @@ div[class^="announcementBar"] { } } +/** Navbar */ + @media only screen and (max-width: 1050px) { .navbar__toggle { display: inherit; @@ -158,16 +196,7 @@ div[class^="announcementBar"] { } } -.footer { - .footer__copyright { - text-align: left; - font-size: 0.8em; - opacity: 0.5; - } - &.footer--dark { - --ifm-footer-background-color: #000000; - } -} +/* Misc */ .button { white-space: initial; @@ -192,64 +221,93 @@ div[class^="announcementBar"] { } } +.footer { + .footer__copyright { + text-align: left; + font-size: 0.8em; + opacity: 0.5; + } + &.footer--dark { + --ifm-footer-background-color: #000000; + } +} + +/* Hero */ + .hero { padding: 5vh 0; -} -.hero__subtitle { - font-size: 1.25em; - margin: 1rem auto 3rem; - max-width: 800px; -} -.hero__content { - text-align: center; - padding: 2rem 0; - height: 100%; -} + .hero__subtitle { + font-size: 1.25em; + margin: 1rem auto 3rem; + max-width: 800px; -.quickstart__content { - text-align: center; - padding: 2rem 0; - height: 100%; - margin: 2rem 0; - background: #34394d; - border-radius: var(--ifm-card-border-radius); + img { + vertical-align: middle; + margin-top: -0.3em; + } + } + .hero__content { + text-align: center; + 
padding: 2rem 0; + height: 100%; + } } -.quickstart__title { - color: #fafafa; -} +/* Sidebar Menu */ + +.menu .theme-doc-sidebar-menu { + ul li.saasOnly a.menu__link { + &:after { + content: ""; + display: block; + width: 20px; + height: 20px; + flex-shrink: 0; + margin-right: auto; + margin-left: 10px; + opacity: 0.5; + background-image: url("data:image/svg+xml,%3Csvg xmlns='http://www.w3.org/2000/svg' viewBox='64 64 896 896' focusable='false' data-icon='cloud' width='1em' height='1em' fill='currentColor' aria-hidden='true'%3E%3Cpath d='M811.4 418.7C765.6 297.9 648.9 212 512.2 212S258.8 297.8 213 418.6C127.3 441.1 64 519.1 64 612c0 110.5 89.5 200 199.9 200h496.2C870.5 812 960 722.5 960 612c0-92.7-63.1-170.7-148.6-193.3zm36.3 281a123.07 123.07 0 01-87.6 36.3H263.9c-33.1 0-64.2-12.9-87.6-36.3A123.3 123.3 0 01140 612c0-28 9.1-54.3 26.2-76.3a125.7 125.7 0 0166.1-43.7l37.9-9.9 13.9-36.6c8.6-22.8 20.6-44.1 35.7-63.4a245.6 245.6 0 0152.4-49.9c41.1-28.9 89.5-44.2 140-44.2s98.9 15.3 140 44.2c19.9 14 37.5 30.8 52.4 49.9 15.1 19.3 27.1 40.7 35.7 63.4l13.8 36.5 37.8 10c54.3 14.5 92.1 63.8 92.1 120 0 33.1-12.9 64.3-36.3 87.7z'%3E%3C/path%3E%3C/svg%3E"); + background-repeat: no-repeat; + background-position: 50% 50%; + background-size: 20px 20px; + [data-theme="dark"] & { + filter: invert(1); + } + } + } -.quickstart__subtitle { - font-size: 1.1rem; - color: gray; -} + .theme-doc-sidebar-item-category-level-1 .menu__link { + font-weight: 400; + } + + .theme-doc-sidebar-item-category-level-1 .menu__link--active { + font-weight: 600; + } -.quickstart__codeblock { - text-align: left; - padding: 0 20vh; + .theme-doc-sidebar-item-category-level-1 > div > a:first-child { + color: var(--ifm-navbar-link-color); + font-weight: 600; + padding: calc(var(--ifm-menu-link-padding-vertical) + 0.2rem) var(--ifm-menu-link-padding-horizontal); + } + .theme-doc-sidebar-item-category-level-1 > div > a.menu__link--active { + color: var(--ifm-navbar-link-color); + font-weight: 600; + } } -.theme-doc-sidebar-menu ul li.saasOnly a.menu__link { - &:after { - content: ""; - display: block; - width: 20px; - height: 20px; - flex-shrink: 0; - margin-right: auto; - margin-left: 10px; - opacity: 0.5; - background-image: url("data:image/svg+xml,%3Csvg xmlns='http://www.w3.org/2000/svg' viewBox='64 64 896 896' focusable='false' data-icon='cloud' width='1em' height='1em' fill='currentColor' aria-hidden='true'%3E%3Cpath d='M811.4 418.7C765.6 297.9 648.9 212 512.2 212S258.8 297.8 213 418.6C127.3 441.1 64 519.1 64 612c0 110.5 89.5 200 199.9 200h496.2C870.5 812 960 722.5 960 612c0-92.7-63.1-170.7-148.6-193.3zm36.3 281a123.07 123.07 0 01-87.6 36.3H263.9c-33.1 0-64.2-12.9-87.6-36.3A123.3 123.3 0 01140 612c0-28 9.1-54.3 26.2-76.3a125.7 125.7 0 0166.1-43.7l37.9-9.9 13.9-36.6c8.6-22.8 20.6-44.1 35.7-63.4a245.6 245.6 0 0152.4-49.9c41.1-28.9 89.5-44.2 140-44.2s98.9 15.3 140 44.2c19.9 14 37.5 30.8 52.4 49.9 15.1 19.3 27.1 40.7 35.7 63.4l13.8 36.5 37.8 10c54.3 14.5 92.1 63.8 92.1 120 0 33.1-12.9 64.3-36.3 87.7z'%3E%3C/path%3E%3C/svg%3E"); - background-repeat: no-repeat; - background-position: 50% 50%; - background-size: 20px 20px; - [data-theme="dark"] & { - filter: invert(1); - } +/* TOC */ +.table-of-contents { + padding-left: 1.5rem; + font-size: 0.9rem; + line-height: 1rem; + .table-of-contents__link--active { + font-weight: 600; } } +/* Search */ + [data-theme="light"] .DocSearch { /* --docsearch-primary-color: var(--ifm-color-primary); */ /* --docsearch-text-color: var(--ifm-font-color-base); */ @@ -285,18 +343,3 @@ 
div[class^="announcementBar"] { --docsearch-footer-background: var(--ifm-background-surface-color); --docsearch-key-gradient: linear-gradient(-26.5deg, var(--ifm-color-emphasis-200) 0%, var(--ifm-color-emphasis-100) 100%); } - -.theme-doc-sidebar-item-category-level-1 > div > a:first-child { - color: var(--ifm-navbar-link-color); - font-size: 17px; -} - -.theme-doc-sidebar-item-category-level-1 > div > a.menu__link--active { - color: var(--ifm-menu-color-active); - font-size: 17px; -} - -/* Increase padding for levels greater than 1 */ -[class^="theme-doc-sidebar-item"][class*="-level-"]:not(.theme-doc-sidebar-item-category-level-1) { - padding-left: 8px; -} diff --git a/docs-website/static/img/acryl-logo-transparent-mark.svg b/docs-website/static/img/acryl-logo-transparent-mark.svg new file mode 100644 index 00000000000000..87c9904baaf8c0 --- /dev/null +++ b/docs-website/static/img/acryl-logo-transparent-mark.svg @@ -0,0 +1 @@ + \ No newline at end of file