Commit
Merge branch 'datahub-project:master' into master
anshbansal authored Oct 21, 2024
2 parents 2543bfa + 554288b commit 1da4ae9
Showing 33 changed files with 1,087 additions and 360 deletions.
11 changes: 10 additions & 1 deletion datahub-web-react/src/app/ingest/source/builder/constants.ts
@@ -35,6 +35,7 @@ import csvLogo from '../../../../images/csv-logo.png';
import qlikLogo from '../../../../images/qliklogo.png';
import sigmaLogo from '../../../../images/sigmalogo.png';
import sacLogo from '../../../../images/saclogo.svg';
import datahubLogo from '../../../../images/datahublogo.png';

export const ATHENA = 'athena';
export const ATHENA_URN = `urn:li:dataPlatform:${ATHENA}`;
@@ -125,6 +126,11 @@ export const SIGMA = 'sigma';
export const SIGMA_URN = `urn:li:dataPlatform:${SIGMA}`;
export const SAC = 'sac';
export const SAC_URN = `urn:li:dataPlatform:${SAC}`;
export const DATAHUB = 'datahub';
export const DATAHUB_GC = 'datahub-gc';
export const DATAHUB_LINEAGE_FILE = 'datahub-lineage-file';
export const DATAHUB_BUSINESS_GLOSSARY = 'datahub-business-glossary';
export const DATAHUB_URN = `urn:li:dataPlatform:${DATAHUB}`;

export const PLATFORM_URN_TO_LOGO = {
    [ATHENA_URN]: athenaLogo,
@@ -165,6 +171,7 @@ export const PLATFORM_URN_TO_LOGO = {
    [QLIK_SENSE_URN]: qlikLogo,
    [SIGMA_URN]: sigmaLogo,
    [SAC_URN]: sacLogo,
    [DATAHUB_URN]: datahubLogo,
};

export const SOURCE_TO_PLATFORM_URN = {
@@ -178,5 +185,7 @@ export const SOURCE_TO_PLATFORM_URN = {
    [SNOWFLAKE_USAGE]: SNOWFLAKE_URN,
    [STARBURST_TRINO_USAGE]: TRINO_URN,
    [DBT_CLOUD]: DBT_URN,
    [VERTICA]: VERTICA_URN,
    [DATAHUB_GC]: DATAHUB_URN,
    [DATAHUB_LINEAGE_FILE]: DATAHUB_URN,
    [DATAHUB_BUSINESS_GLOSSARY]: DATAHUB_URN,
};
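Taken together, these maps let the ingestion UI resolve a source type to a platform logo. A hypothetical lookup sketch (the helper name, import path, and casts are illustrative, not part of this change):

```typescript
import {
    DATAHUB_GC,
    PLATFORM_URN_TO_LOGO,
    SOURCE_TO_PLATFORM_URN,
} from './constants';

// Hypothetical helper: map an ingestion source type to its platform logo.
function logoForSource(sourceType: string): string | undefined {
    const urn = (SOURCE_TO_PLATFORM_URN as Record<string, string>)[sourceType];
    return urn ? (PLATFORM_URN_TO_LOGO as Record<string, string>)[urn] : undefined;
}

logoForSource(DATAHUB_GC); // resolves to datahubLogo via DATAHUB_URN
```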
1 change: 0 additions & 1 deletion docs/businessattributes.md
@@ -28,7 +28,6 @@ Taking the example of "United States- Social Security Number", if an application
What you need to create/update business attributes and associate them with dataset schema fields:

* **Manage Business Attributes** platform privilege to create/update/delete business attributes.
* **Edit Dataset Column Business Attribute** metadata privilege to associate business attributes with dataset schema fields.

## Using Business Attributes
As of now, Business Attributes can only be created through the UI.
@@ -12,7 +12,13 @@
    TableSchemaMetadataValue,
)
from dagster._core.execution.stats import RunStepKeyStatsSnapshot, StepEventStatus
from dagster._core.snap import JobSnapshot

try:
    from dagster._core.snap import JobSnapshot  # type: ignore[attr-defined]
except ImportError:
    # The import path changed in Dagster 1.8.12; see
    # https://github.com/dagster-io/dagster/commit/29a37d1f0260cfd112849633d1096ffc916d6c95
    from dagster._core.snap import JobSnap as JobSnapshot

from dagster._core.snap.node import OpDefSnap
from dagster._core.storage.dagster_run import DagsterRun, DagsterRunStatsSnapshot
from datahub.api.entities.datajob import DataFlow, DataJob
Expand Down
28 changes: 15 additions & 13 deletions metadata-ingestion/docs/transformer/dataset_transformer.md
@@ -122,12 +122,13 @@ transformers:
```
## Simple Add Dataset ownership
### Config Details
| Field | Required | Type | Default | Description |
|--------------------|----------|--------------|-------------|---------------------------------------------------------------------|
| `owner_urns` | ✅ | list[string] | | List of owner urns. |
| `ownership_type` | | string | "DATAOWNER" | ownership type of the owners (either as enum or ownership type urn) |
| `replace_existing` | | boolean | `false` | Whether to remove ownership from entity sent by ingestion source. |
| `semantics` | | enum | `OVERWRITE` | Whether to OVERWRITE or PATCH the entity present on DataHub GMS. |
| Field              | Required | Type         | Default     | Description                                                                                                   |
|--------------------|----------|--------------|-------------|---------------------------------------------------------------------------------------------------------------|
| `owner_urns`       | ✅        | list[string] |             | List of owner urns.                                                                                           |
| `ownership_type`   |          | string       | "DATAOWNER" | Ownership type of the owners (either as enum or ownership type urn).                                          |
| `replace_existing` |          | boolean      | `false`     | Whether to remove ownership from the entity sent by the ingestion source.                                     |
| `semantics`        |          | enum         | `OVERWRITE` | Whether to OVERWRITE or PATCH the entity present on DataHub GMS.                                              |
| `on_conflict`      |          | enum         | `DO_UPDATE` | Whether to make changes if owners already exist. If set to DO_NOTHING, the `semantics` setting is irrelevant. |

For transformer behaviour on `replace_existing` and `semantics`, please refer to the section [Relationship Between replace_existing And semantics](#relationship-between-replace_existing-and-semantics).
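For example, a recipe entry exercising the new `on_conflict` flag might look like this (a sketch; owner urns are illustrative):

```yaml
transformers:
  - type: "simple_add_dataset_ownership"
    config:
      owner_urns:
        - "urn:li:corpuser:username1"
        - "urn:li:corpgroup:groupname"
      ownership_type: "PRODUCER"
      on_conflict: "DO_NOTHING" # leave datasets that already have owners untouched
```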

@@ -191,13 +192,14 @@

## Pattern Add Dataset ownership
### Config Details
| Field | Required | Type | Default | Description |
|--------------------|----------|----------------------|-------------|-----------------------------------------------------------------------------------------|
| `owner_pattern` | ✅ | map[regx, list[urn]] | | entity urn with regular expression and list of owners urn apply to matching entity urn. |
| `ownership_type` | | string | "DATAOWNER" | ownership type of the owners (either as enum or ownership type urn) |
| `replace_existing` | | boolean | `false` | Whether to remove owners from entity sent by ingestion source. |
| `semantics` | | enum | `OVERWRITE` | Whether to OVERWRITE or PATCH the entity present on DataHub GMS. |
| `is_container` | | bool | `false` | Whether to also consider a container or not. If true, then ownership will be attached to both the dataset and its container. |
| Field              | Required | Type                  | Default     | Description                                                                                                       |
|--------------------|----------|-----------------------|-------------|--------------------------------------------------------------------------------------------------------------------|
| `owner_pattern`    | ✅        | map[regex, list[urn]] |             | Map of entity urn regex patterns to the lists of owner urns applied to matching entities.                           |
| `ownership_type`   |          | string                | "DATAOWNER" | Ownership type of the owners (either as enum or ownership type urn).                                                |
| `replace_existing` |          | boolean               | `false`     | Whether to remove owners from the entity sent by the ingestion source.                                              |
| `semantics`        |          | enum                  | `OVERWRITE` | Whether to OVERWRITE or PATCH the entity present on DataHub GMS.                                                    |
| `is_container`     |          | bool                  | `false`     | Whether to also consider a container. If true, ownership will be attached to both the dataset and its container.    |
| `on_conflict`      |          | enum                  | `DO_UPDATE` | Whether to make changes if owners already exist. If set to DO_NOTHING, the `semantics` setting is irrelevant.       |

Let’s suppose we’d like to append a series of users who we know own a given dataset but aren’t detected during normal ingestion. To do so, we can use the `pattern_add_dataset_ownership` module that’s included in the ingestion framework, as in the recipe sketch below. It matches each dataset’s `urn` against the configured patterns and assigns the respective owners.
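A minimal recipe sketch (regex patterns and owner urns are illustrative):

```yaml
transformers:
  - type: "pattern_add_dataset_ownership"
    config:
      owner_pattern:
        rules:
          ".*example1.*": ["urn:li:corpuser:username1"]
          ".*example2.*": ["urn:li:corpuser:username2"]
      ownership_type: "DATAOWNER"
      on_conflict: "DO_UPDATE" # the default; update ownership even if owners exist
```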

@@ -1,5 +1,5 @@
import logging
from typing import Dict, Iterable, List, Optional, Union
from typing import Callable, Dict, Iterable, List, Optional, Tuple, Type, Union, cast

from avrogen.dict_wrapper import DictWrapper
from pydantic import BaseModel
@@ -14,7 +14,14 @@
from datahub.emitter.mcp import MetadataChangeProposalWrapper
from datahub.emitter.mcp_builder import DatahubKey
from datahub.ingestion.graph.client import DataHubGraph
from datahub.metadata.urns import PlatformResourceUrn
from datahub.metadata.urns import DataPlatformUrn, PlatformResourceUrn, Urn
from datahub.utilities.openapi_utils import OpenAPIGraphClient
from datahub.utilities.search_utils import (
    ElasticDocumentQuery,
    ElasticsearchQueryBuilder,
    LogicalOperator,
    SearchField,
)

logger = logging.getLogger(__name__)

@@ -69,71 +76,75 @@ def to_resource_info(self) -> models.PlatformResourceInfoClass:
)


class OpenAPIGraphClient:
    ENTITY_KEY_ASPECT_MAP = {
        aspect_type.ASPECT_INFO.get("keyForEntity"): name
        for name, aspect_type in models.ASPECT_NAME_MAP.items()
        if aspect_type.ASPECT_INFO.get("keyForEntity")
    }

    def __init__(self, graph: DataHubGraph):
        self.graph = graph
        self.openapi_base = graph._gms_server.rstrip("/") + "/openapi/v3"

    def scroll_urns_by_filter(
        self,
        entity_type: str,
        extra_or_filters: List[Dict[str, str]],
        extra_and_filters: List[Dict[str, str]] = [],
    ) -> Iterable[str]:
        """
        Scroll through all urns that match the given filters
        """

        key_aspect = self.ENTITY_KEY_ASPECT_MAP.get(entity_type)
        assert key_aspect, f"No key aspect found for entity type {entity_type}"
        if extra_or_filters and extra_and_filters:
            raise ValueError(
                "Only one of extra_or_filters and extra_and_filters should be provided"
            )

        count = 1000
        query = (
            " OR ".join(
                [
                    f"{filter['field']}:\"{filter['value']}\""
                    for filter in extra_or_filters
                ]
            )
            if extra_or_filters
            else " AND ".join(
                [
                    f"{filter['field']}:\"{filter['value']}\""
                    for filter in extra_and_filters
                ]
            )
        )
        scroll_id = None
        while True:
            response = self.graph._get_generic(
                self.openapi_base + f"/entity/{entity_type.lower()}",
                params={
                    "systemMetadata": "false",
                    "includeSoftDelete": "false",
                    "skipCache": "false",
                    "aspects": [key_aspect],
                    "scrollId": scroll_id,
                    "count": count,
                    "query": query,
                },
            )
            entities = response.get("entities", [])
            scroll_id = response.get("scrollId")
            for entity in entities:
                yield entity["urn"]
            if not scroll_id:
                break


class DataPlatformInstanceUrn:
    """
    A simple implementation of a URN class for DataPlatformInstance.
    Since this is not present in the URN registry, we need to implement it here.
    """

    @staticmethod
    def create_from_id(platform_instance_urn: str) -> Urn:
        if platform_instance_urn.startswith("urn:li:platformInstance:"):
            string_urn = platform_instance_urn
        else:
            string_urn = f"urn:li:platformInstance:{platform_instance_urn}"
        return Urn.from_string(string_urn)


class UrnSearchField(SearchField):
    """
    A search field that supports URN values.
    TODO: Move this to search_utils after we make this more generic.
    """

    def __init__(self, field_name: str, urn_value_extractor: Callable[[str], Urn]):
        self.urn_value_extractor = urn_value_extractor
        super().__init__(field_name)

    def get_search_value(self, value: str) -> str:
        return str(self.urn_value_extractor(value))


class PlatformResourceSearchField(SearchField):
    def __init__(self, field_name: str):
        super().__init__(field_name)

    @classmethod
    def from_search_field(
        cls, search_field: SearchField
    ) -> "PlatformResourceSearchField":
        # pretends to be a class method, but just returns the input
        return search_field  # type: ignore


class PlatformResourceSearchFields:
    PRIMARY_KEY = PlatformResourceSearchField("primaryKey")
    RESOURCE_TYPE = PlatformResourceSearchField("resourceType")
    SECONDARY_KEYS = PlatformResourceSearchField("secondaryKeys")
    PLATFORM = PlatformResourceSearchField.from_search_field(
        UrnSearchField(
            field_name="platform.keyword",
            urn_value_extractor=DataPlatformUrn.create_from_id,
        )
    )
    PLATFORM_INSTANCE = PlatformResourceSearchField.from_search_field(
        UrnSearchField(
            field_name="platformInstance.keyword",
            urn_value_extractor=DataPlatformInstanceUrn.create_from_id,
        )
    )


class ElasticPlatformResourceQuery(ElasticDocumentQuery[PlatformResourceSearchField]):
    def __init__(self):
        super().__init__()

    @classmethod
    def create_from(
        cls: Type["ElasticPlatformResourceQuery"],
        *args: Tuple[Union[str, PlatformResourceSearchField], str],
    ) -> "ElasticPlatformResourceQuery":
        return cast(ElasticPlatformResourceQuery, super().create_from(*args))


class PlatformResource(BaseModel):
Expand All @@ -147,6 +158,12 @@ def remove(
        cls,
        key: PlatformResourceKey,
    ) -> "PlatformResource":
        """
        Creates a PlatformResource object with the removed status set to True.
        Removed PlatformResource objects are used to soft-delete resources from
        the graph.
        To hard-delete a resource, use the delete method.
        """
        return cls(
            id=key.id,
            removed=True,
@@ -240,28 +257,38 @@ def from_datahub(

    @staticmethod
    def search_by_key(
        graph_client: DataHubGraph, key: str, primary: bool = True
    ) -> Iterable["PlatformResource"]:
        extra_or_filters = []
        extra_or_filters.append(
            {
                "field": "primaryKey",
                "condition": "EQUAL",
                "value": key,
            }
        )
        if not primary:  # we expand the search to secondary keys
            extra_or_filters.append(
                {
                    "field": "secondaryKeys",
                    "condition": "EQUAL",
                    "value": key,
                }
            )
        openapi_client = OpenAPIGraphClient(graph_client)
        for urn in openapi_client.scroll_urns_by_filter(
            entity_type="platformResource",
            extra_or_filters=extra_or_filters,
        ):
            platform_resource = PlatformResource.from_datahub(graph_client, urn)
            if platform_resource:

    @staticmethod
    def search_by_key(
        graph_client: DataHubGraph,
        key: str,
        primary: bool = True,
        is_exact: bool = True,
    ) -> Iterable["PlatformResource"]:
        """
        Searches for PlatformResource entities by primary or secondary key.

        :param graph_client: DataHubGraph client
        :param key: The key to search for
        :param primary: Whether to search for primary only or expand the search
            to secondary keys (default: True)
        :param is_exact: Whether to search for an exact match (default: True)
        :return: An iterable of PlatformResource objects
        """

        elastic_platform_resource_group = (
            ElasticPlatformResourceQuery.create_from()
            .group(LogicalOperator.OR)
            .add_field_match(
                PlatformResourceSearchFields.PRIMARY_KEY, key, is_exact=is_exact
            )
        )
        if not primary:  # we expand the search to secondary keys
            elastic_platform_resource_group.add_field_match(
                PlatformResourceSearchFields.SECONDARY_KEYS, key, is_exact=is_exact
            )
        query = elastic_platform_resource_group.end()
        openapi_client = OpenAPIGraphClient(graph_client)
        for urn in openapi_client.scroll_urns_by_filter(
            entity_type="platformResource",
            query=query,
        ):
            platform_resource = PlatformResource.from_datahub(graph_client, urn)
            if platform_resource:
@@ -273,18 +300,16 @@ def delete(self, graph_client: DataHubGraph, hard: bool = True) -> None:
    @staticmethod
    def search_by_filters(
        graph_client: DataHubGraph,
        and_filters: List[Dict[str, str]] = [],
        or_filters: List[Dict[str, str]] = [],
    ) -> Iterable["PlatformResource"]:
        if and_filters and or_filters:
            raise ValueError(
                "Only one of and_filters and or_filters should be provided"
            )
        openapi_client = OpenAPIGraphClient(graph_client)
        for urn in openapi_client.scroll_urns_by_filter(
            entity_type="platformResource",
            extra_or_filters=or_filters if or_filters else [],
            extra_and_filters=and_filters if and_filters else [],
        ):
            platform_resource = PlatformResource.from_datahub(graph_client, urn)
            if platform_resource:

    @staticmethod
    def search_by_filters(
        graph_client: DataHubGraph,
        query: Union[
            ElasticPlatformResourceQuery,
            ElasticDocumentQuery,
            ElasticsearchQueryBuilder,
        ],
    ) -> Iterable["PlatformResource"]:
        openapi_client = OpenAPIGraphClient(graph_client)
        for urn in openapi_client.scroll_urns_by_filter(
            entity_type="platformResource",
            query=query,
        ):
            platform_resource = PlatformResource.from_datahub(graph_client, urn)
            if platform_resource:
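As a usage sketch of the new fluent query API (mirroring the chain in `search_by_key` above; the `graph_client` setup is an assumption, and `ElasticPlatformResourceQuery`, `PlatformResourceSearchFields`, and `PlatformResource` come from the module shown in this diff):

```python
from datahub.ingestion.graph.client import DatahubClientConfig, DataHubGraph
from datahub.utilities.search_utils import LogicalOperator

# Assumed setup; any existing DataHubGraph instance works.
graph_client = DataHubGraph(DatahubClientConfig(server="http://localhost:8080"))

# Match a key against both primary and secondary keys, OR-ed together.
query = (
    ElasticPlatformResourceQuery.create_from()
    .group(LogicalOperator.OR)
    .add_field_match(PlatformResourceSearchFields.PRIMARY_KEY, "my-key", is_exact=True)
    .add_field_match(PlatformResourceSearchFields.SECONDARY_KEYS, "my-key", is_exact=True)
    .end()
)
for resource in PlatformResource.search_by_filters(graph_client, query=query):
    print(resource.id)
```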
1 change: 1 addition & 0 deletions metadata-ingestion/src/datahub/ingestion/graph/client.py
@@ -351,6 +351,7 @@ def get_tags(self, entity_urn: str) -> Optional[GlobalTagsClass]:
    def get_glossary_terms(self, entity_urn: str) -> Optional[GlossaryTermsClass]:
        return self.get_aspect(entity_urn=entity_urn, aspect_type=GlossaryTermsClass)

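    # Note: maxsize=1 caches only the most recent entity_urn lookup, so
    # repeated calls for the same URN reuse the cached aspect instead of
    # re-querying GMS.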
    @functools.lru_cache(maxsize=1)
    def get_domain(self, entity_urn: str) -> Optional[DomainsClass]:
        return self.get_aspect(entity_urn=entity_urn, aspect_type=DomainsClass)
